From b1d65e3ab9cea77114878e2f45de9f7f03b230f6 Mon Sep 17 00:00:00 2001 From: Ricky Chau Date: Wed, 5 Apr 2023 16:44:06 -0400 Subject: [PATCH 01/23] feat(cdk): initial commit --- .bandit.yaml | 18 + .coveragerc | 26 + .editorconfig | 31 + .gitattributes | 16 + .gitignore | 366 +++ .gitlint | 21 + .python-version | 1 + .terraform-version | 1 + LICENSE | 201 ++ Makefile | 560 +++++ Pipfile | 57 + Pipfile.lock | 1977 +++++++++++++++++ README.md | 0 cdk/.gitignore | 10 + cdk/README.md | 58 + cdk/app.py | 28 + cdk/cdk.json | 49 + cdk/cdk/__init__.py | 0 cdk/cdk/cdk_stack.py | 37 + cdk/requirements-dev.txt | 1 + cdk/requirements.txt | 2 + cdk/source.bat | 13 + cdk/tests/__init__.py | 0 cdk/tests/unit/__init__.py | 0 cdk/tests/unit/test_cdk_stack.py | 15 + docs/faq.md | 3 + docs/install.md | 74 + docs/usage/development.md | 0 docs/usage/getting-started.md | 3 + mkdocs.yml | 62 + mypy.ini | 25 + pyproject.toml | 124 ++ rdk/__init__.py | 11 + rdk/__main__.py | 8 + rdk/cli/__init__.py | 0 rdk/cli/commands/__init__.py | 0 rdk/cli/commands/deploy.py | 17 + rdk/cli/commands/init.py | 15 + rdk/cli/main.py | 115 + rdk/core/__init__.py | 0 rdk/core/errors.py | 135 ++ rdk/core/rules_deploy.py | 53 + rdk/py.typed | 0 rdk/runners/__init__.py | 0 rdk/runners/base.py | 217 ++ rdk/runners/cdk.py | 53 + rdk/utils/__init__.py | 0 rdk/utils/logger.py | 151 ++ rdk/version.py | 1 + requirements.txt | 35 + setup.py | 43 + sonar-project.properties | 38 + tests/__init__.py | 0 tests/unit/__init__.py | 0 tests/unit/cli/__init__.py | 0 tests/unit/cli/command/__init__.py | 0 tests/unit/cli/command/test_init.py | 14 + tests/unit/cli/conftest.py | 8 + tests/unit/conftest.py | 15 + tests/unit/core/__init__.py | 0 tests/unit/core/conftest.py | 8 + tests/unit/core/data/my-pytest.py | 0 tests/unit/core/data/plan-bad-01.yaml | 23 + tests/unit/core/data/plan-bad-02.yaml | 11 + tests/unit/core/data/plan-bad-03.yaml | 7 + tests/unit/core/data/plan-bad-04.yaml | 11 + tests/unit/core/data/plan-bad-05.yaml | 11 + tests/unit/core/data/plan-bad-06.yaml | 13 + tests/unit/core/data/plan-bad-07.yaml | 13 + tests/unit/core/data/plan-bad-08.yaml | 11 + tests/unit/core/data/plan-bad-09.yaml | 9 + tests/unit/core/data/plan-bad-10.yaml | 11 + tests/unit/core/data/plan-bad-11.yaml | 11 + tests/unit/core/data/plan-good-01.yaml | 46 + tests/unit/core/data/plan-good-02.yaml | 13 + tests/unit/core/data/policy.yaml | 20 + tests/unit/core/data/runtime-files/file1.json | 3 + tests/unit/core/data/runtime-files/file2.json | 3 + tests/unit/core/data/tf-root-module/main.tf | 0 .../tf-root-module/my-net-change-1.tfvars | 0 .../tf-root-module/my-net-change-2.tfvars | 0 .../core/data/tf-root-module/my-var-1.tfvars | 0 .../core/data/tf-root-module/my-var-2.tfvars | 0 tests/unit/core/test_errors.py | 40 + tests/unit/runners/__init__.py | 0 tests/unit/runners/conftest.py | 11 + tests/unit/runners/test_base.py | 130 ++ tests/unit/test_pkg_metadata.py | 18 + tests/unit/utils/__init__.py | 0 tests/unit/utils/conftest.py | 8 + .../unit/utils/data/runtime-files/file1.json | 3 + .../unit/utils/data/runtime-files/file2.json | 3 + tests/unit/utils/test_logger.py | 180 ++ tools/ci/bin/init-snapshot.sh | 83 + tools/ci/bin/install-tools-on-jenkins.sh | 49 + tools/docs/bin/generate-ref-cli.sh | 54 + tools/githooks/bin/commit-msg | 41 + tools/githooks/bin/pre-push | 77 + tools/githooks/etc/commit-template | 32 + twine.pypirc | 17 + 100 files changed, 5678 insertions(+) create mode 100644 .bandit.yaml create mode 100644 .coveragerc create mode 100644 .editorconfig create mode 
100644 .gitattributes create mode 100644 .gitignore create mode 100644 .gitlint create mode 100644 .python-version create mode 100644 .terraform-version create mode 100644 LICENSE create mode 100644 Makefile create mode 100644 Pipfile create mode 100644 Pipfile.lock create mode 100644 README.md create mode 100644 cdk/.gitignore create mode 100644 cdk/README.md create mode 100644 cdk/app.py create mode 100644 cdk/cdk.json create mode 100644 cdk/cdk/__init__.py create mode 100644 cdk/cdk/cdk_stack.py create mode 100644 cdk/requirements-dev.txt create mode 100644 cdk/requirements.txt create mode 100644 cdk/source.bat create mode 100644 cdk/tests/__init__.py create mode 100644 cdk/tests/unit/__init__.py create mode 100644 cdk/tests/unit/test_cdk_stack.py create mode 100644 docs/faq.md create mode 100644 docs/install.md create mode 100644 docs/usage/development.md create mode 100644 docs/usage/getting-started.md create mode 100644 mkdocs.yml create mode 100644 mypy.ini create mode 100644 pyproject.toml create mode 100644 rdk/__init__.py create mode 100644 rdk/__main__.py create mode 100644 rdk/cli/__init__.py create mode 100644 rdk/cli/commands/__init__.py create mode 100644 rdk/cli/commands/deploy.py create mode 100644 rdk/cli/commands/init.py create mode 100644 rdk/cli/main.py create mode 100644 rdk/core/__init__.py create mode 100644 rdk/core/errors.py create mode 100644 rdk/core/rules_deploy.py create mode 100644 rdk/py.typed create mode 100644 rdk/runners/__init__.py create mode 100644 rdk/runners/base.py create mode 100644 rdk/runners/cdk.py create mode 100644 rdk/utils/__init__.py create mode 100644 rdk/utils/logger.py create mode 100644 rdk/version.py create mode 100644 requirements.txt create mode 100644 setup.py create mode 100644 sonar-project.properties create mode 100644 tests/__init__.py create mode 100644 tests/unit/__init__.py create mode 100644 tests/unit/cli/__init__.py create mode 100644 tests/unit/cli/command/__init__.py create mode 100644 tests/unit/cli/command/test_init.py create mode 100644 tests/unit/cli/conftest.py create mode 100644 tests/unit/conftest.py create mode 100644 tests/unit/core/__init__.py create mode 100644 tests/unit/core/conftest.py create mode 100644 tests/unit/core/data/my-pytest.py create mode 100644 tests/unit/core/data/plan-bad-01.yaml create mode 100644 tests/unit/core/data/plan-bad-02.yaml create mode 100644 tests/unit/core/data/plan-bad-03.yaml create mode 100644 tests/unit/core/data/plan-bad-04.yaml create mode 100644 tests/unit/core/data/plan-bad-05.yaml create mode 100644 tests/unit/core/data/plan-bad-06.yaml create mode 100644 tests/unit/core/data/plan-bad-07.yaml create mode 100644 tests/unit/core/data/plan-bad-08.yaml create mode 100644 tests/unit/core/data/plan-bad-09.yaml create mode 100644 tests/unit/core/data/plan-bad-10.yaml create mode 100644 tests/unit/core/data/plan-bad-11.yaml create mode 100644 tests/unit/core/data/plan-good-01.yaml create mode 100644 tests/unit/core/data/plan-good-02.yaml create mode 100644 tests/unit/core/data/policy.yaml create mode 100644 tests/unit/core/data/runtime-files/file1.json create mode 100644 tests/unit/core/data/runtime-files/file2.json create mode 100644 tests/unit/core/data/tf-root-module/main.tf create mode 100644 tests/unit/core/data/tf-root-module/my-net-change-1.tfvars create mode 100644 tests/unit/core/data/tf-root-module/my-net-change-2.tfvars create mode 100644 tests/unit/core/data/tf-root-module/my-var-1.tfvars create mode 100644 tests/unit/core/data/tf-root-module/my-var-2.tfvars create 
mode 100644 tests/unit/core/test_errors.py create mode 100644 tests/unit/runners/__init__.py create mode 100644 tests/unit/runners/conftest.py create mode 100644 tests/unit/runners/test_base.py create mode 100644 tests/unit/test_pkg_metadata.py create mode 100644 tests/unit/utils/__init__.py create mode 100644 tests/unit/utils/conftest.py create mode 100644 tests/unit/utils/data/runtime-files/file1.json create mode 100644 tests/unit/utils/data/runtime-files/file2.json create mode 100644 tests/unit/utils/test_logger.py create mode 100755 tools/ci/bin/init-snapshot.sh create mode 100644 tools/ci/bin/install-tools-on-jenkins.sh create mode 100644 tools/docs/bin/generate-ref-cli.sh create mode 100755 tools/githooks/bin/commit-msg create mode 100755 tools/githooks/bin/pre-push create mode 100644 tools/githooks/etc/commit-template create mode 100644 twine.pypirc diff --git a/.bandit.yaml b/.bandit.yaml new file mode 100644 index 00000000..8d65e110 --- /dev/null +++ b/.bandit.yaml @@ -0,0 +1,18 @@ +############################################################################### +# BANDIT CONFIGURATIONS +############################################################################### + +# +# Reference: +# https://bandit.readthedocs.io/en/latest/config.html +# + +# Excluded path globs +# exclude_dirs: + +# Allow the use of assert in tests +assert_used: + skips: + - test_*.py + +############################################################################### diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 00000000..d6fdb661 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,26 @@ +################################################################################ +# PYTHON COVERAGE CONFIGURATIONS +################################################################################ + +# +# Reference: +# https://coverage.readthedocs.io/en/latest/config.html +# + +[run] +branch = True + +[report] +fail_under = 90 + +[html] +directory = .reports/coverage-html +title = Coverage Report for rdk + +[xml] +output = .reports/coverage.xml + +[json] +output = .reports/coverage.json + +################################################################################ diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 00000000..67778693 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,31 @@ +############################################################################### +# .editorconfig +############################################################################### + +# Configure your IDE + +# Documentation: +# http://editorconfig.org/ + +# top-most EditorConfig file +root = true + +# Configurations for all files +[*] +charset = utf-8 +end_of_line = lf +indent_size = 2 +indent_style = space +insert_final_newline = true +trim_trailing_whitespace = true + +# Makefiles +[{Makefile,*.Makefile}] +indent_size = 4 +indent_style = tab + +# Python +[*.py] +indent_size = 4 + +############################################################################### diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..76ca1b4d --- /dev/null +++ b/.gitattributes @@ -0,0 +1,16 @@ +############################################################################### +# .gitattributes +############################################################################### + +# gitattributes - defining attributes per path + +# Documentation: +# https://git-scm.com/docs/gitattributes + +# Useful Templates +# https://github.com/alexkaratarakis/gitattributes + +# Everything in this repo is a text file +* text 
eol=lf + +############################################################################### diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..6ee6b8c0 --- /dev/null +++ b/.gitignore @@ -0,0 +1,366 @@ +############################################################################### +# .gitignore +############################################################################### + +# Configure git to ignore things + +# Documentation: +# https://git-scm.com/docs/gitignore + +# Useful Templates +# https://github.com/github/gitignore + +# ----------------------------------------------------------------------------- +# BACKUPS +# ----------------------------------------------------------------------------- + +# https://github.com/github/gitignore/blob/master/Global/Backup.gitignore + +*.bak +*.gho +*.ori +*.orig +*.tmp + +# ----------------------------------------------------------------------------- +# MACOS +# ----------------------------------------------------------------------------- + +# https://github.com/github/gitignore/blob/master/Global/macOS.gitignore + +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +# ----------------------------------------------------------------------------- +# LINUX +# ----------------------------------------------------------------------------- + +# https://github.com/github/gitignore/blob/master/Global/Linux.gitignore + +*~ + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +# ----------------------------------------------------------------------------- +# WINDOWS +# ----------------------------------------------------------------------------- + +# https://github.com/github/gitignore/blob/master/Global/Windows.gitignore + +# Windows thumbnail cache files +Thumbs.db +Thumbs.db:encryptable +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +# ----------------------------------------------------------------------------- +# VIM +# ----------------------------------------------------------------------------- + +# https://github.com/github/gitignore/blob/master/Global/Vim.gitignore + +# Swap +[._]*.s[a-v][a-z] +!*.svg # comment out if you don't need vector files +[._]*.sw[a-p] +[._]s[a-rt-v][a-z] +[._]ss[a-gi-z] +[._]sw[a-p] + +# Session +Session.vim +Sessionx.vim + +# Temporary +.netrwhist +*~ +# Auto-generated tag files +tags +# Persistent undo +[._]*.un~ + +# ----------------------------------------------------------------------------- +# VSCODE +# ----------------------------------------------------------------------------- + +# https://github.com/github/gitignore/blob/master/Global/VisualStudioCode.gitignore + +.vscode +# !.vscode/settings.json +# !.vscode/tasks.json +# !.vscode/launch.json +# 
!.vscode/extensions.json +*.code-workspace + +# ----------------------------------------------------------------------------- +# PYTHON SPECIFIC +# ----------------------------------------------------------------------------- + +# https://github.com/github/gitignore/blob/master/Python.gitignore + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# static files generated from Django application using `collectstatic` +media +static + +# ----------------------------------------------------------------------------- +# TERRAFORM SPECIFIC +# ----------------------------------------------------------------------------- + +# Local .terraform directories +**/.terraform/* +**/terraform.d/* + +# .tfstate files +*.tfstate +*.tfstate.* + +# Crash log files +crash.log + +# Exclude all .tfvars files, which are likely to contain sensitive data, such as +# passwords, private keys, and other secrets. These should not be part of version +# control as they are data points which are potentially sensitive and subject +# to change depending on the environment.
+# +terraform.tfvars +*.auto.tfvars + +# Ignore override files as they are usually used to override resources locally and so +# are not checked in +override.tf +override.tf.json +*_override.tf +*_override.tf.json + +# Include override files you do wish to add to version control using negated pattern +# +# !example_override.tf + +# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan +# example: *tfplan* +*tfplan* + +# Ignore CLI configuration files +.terraformrc +terraform.rc + +# ----------------------------------------------------------------------------- +# APPLICATION SPECIFIC +# ----------------------------------------------------------------------------- + +# pipenv stuff +reqlib-metadata/ + +# Test and lint reports +.reports/ + +# Tool output dir +.rdk/ + +# Sonar +.sonar/ +.scannerwork/ + +# Terraform +.terraform.d/ + +# Docs (generated) +docs/reference/api/ +docs/reference/cli.md +docs/CHANGELOG.md + +############################################################################### diff --git a/.gitlint b/.gitlint new file mode 100644 index 00000000..e0bb6b60 --- /dev/null +++ b/.gitlint @@ -0,0 +1,21 @@ +############################################################################### +# gitlint CONFIGURATIONS +############################################################################### + +# +# Reference: +# https://jorisroovers.com/gitlint/configuration/ +# + +[general] +verbosity = 3 +contrib = contrib-title-conventional-commits +ignore = body-is-missing + +[title-max-length] +line-length = 72 + +[body-max-line-length] +line-length = 80 + +############################################################################### diff --git a/.python-version b/.python-version new file mode 100644 index 00000000..54c5196a --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.10.9 diff --git a/.terraform-version b/.terraform-version new file mode 100644 index 00000000..90a27f9c --- /dev/null +++ b/.terraform-version @@ -0,0 +1 @@ +1.0.5 diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..abd26e24 --- /dev/null +++ b/Makefile @@ -0,0 +1,560 @@ +############################################################################### +# Makefile +############################################################################### + +# +# https://www.gnu.org/software/make/manual/html_node/index.html +# https://www.gnu.org/software/make/manual/html_node/Quick-Reference.html#Quick-Reference +# + +# ----------------------------------------------------------------------------- +# MAKE CONFIGURATIONS +# ----------------------------------------------------------------------------- + +# Default Shell is bash, with errors +SHELL := /usr/bin/env bash +.SHELLFLAGS := -eu -o pipefail -c + +# Do not run in parallel +.NOTPARALLEL: + +# ----------------------------------------------------------------------------- +# CHECK PRE-REQS +# ----------------------------------------------------------------------------- + +# This is intended to run as early as possible to ensure that various things +# that this Makefile depends on are available. + +override prereq_binaries := git python3 pipenv +$(foreach bin,$(prereq_binaries),\ + $(if $(shell command -v $(bin) 2>/dev/null),,\ + $(error '$(bin)' is not installed or available in PATH)\ + )\ +) + +# Make sure we have at least git v2 +ifneq ($(shell git --version | cut -d ' ' -f3 | cut -d. -f1),2) +$(error git is not compatible. Need at least git-2.0) +endif + +# ----------------------------------------------------------------------------- +# VARIABLES - PROJECT CONFIGURATIONS +# ----------------------------------------------------------------------------- + +# Root of the repository +override REPOROOT := $(shell git rev-parse --show-toplevel) + +# Directories +override SRC_DIR := $(REPOROOT)/rdk +override TESTS_DIR := $(REPOROOT)/tests +override TOOLS_DIR := $(REPOROOT)/tools +override REPORTS_DIR := $(REPOROOT)/.reports +override GITHOOKS_DIR := $(TOOLS_DIR)/githooks +override DOCS_DIR := $(REPOROOT)/docs +override TESTS_UNIT_DIR := $(TESTS_DIR)/unit + +# Setup.py configs +override PKG_SETUP := $(REPOROOT)/setup.py +override SETUP_PY_ARGS := --quiet --no-user-cfg + +# python versions +override PYENV_VERSION := $(shell head -1 $(REPOROOT)/.python-version) +export PYENV_VERSION + +# terraform version +override TFENV_TERRAFORM_VERSION := $(shell head -1 $(REPOROOT)/.terraform-version) +export TFENV_TERRAFORM_VERSION + +# pipenv configs +# https://pipenv.pypa.io/en/latest/advanced/#configuration-with-environment-variables +override PIPENV_VENV_IN_PROJECT := 1 +override PIPENV_DEFAULT_PYTHON_VERSION := $(PYENV_VERSION) +export PIPENV_VENV_IN_PROJECT +export PIPENV_DEFAULT_PYTHON_VERSION + +# Collections of file types in the repo +override py_files_in_repo := $(PKG_SETUP) $(SRC_DIR) $(TESTS_DIR) +override md_files_in_repo := $(REPOROOT)/README.md $(DOCS_DIR) + +# Path to init-snapshot helper +override INIT_SNAPSHOT := $(TOOLS_DIR)/ci/bin/init-snapshot.sh +ifneq ($(shell test -x $(INIT_SNAPSHOT); echo $$?),0) +$(shell chmod +x $(INIT_SNAPSHOT)) +endif + +# ------------------------------------------------------------------------------ +# TARGETS - PRIMARY +# ------------------------------------------------------------------------------ + +### * init | Initialize this repository for development +.PHONY: init +init: \ + _githooks-install \ + _python-pipenv-install \ + _helper-init-snapshot-save + +### * fmt | Format source code +.PHONY: fmt +fmt: \ + _helper-init-snapshot-check \ + _fmt-python-docstrings \ +
_fmt-python-isort \ + _fmt-python-black \ + _fmt-markdown-mdformat + +### * lint | Lint source code +.PHONY: lint +lint: \ + _helper-init-snapshot-check \ + _test-reports-mkdir \ + _lint-python-docstrings \ + _lint-python-isort \ + _lint-python-black \ + _lint-python-pylint \ + _lint-python-bandit \ + _lint-python-mypy \ + _lint-python-setup \ + _lint-markdown-mdformat + +### * test | Run unit tests +.PHONY: test +test: \ + _helper-init-snapshot-check \ + _test-reports-mkdir \ + _test-python-pytest + +### * sonar | Run sonar analysis +.PHONY: sonar +sonar: \ + _helper-init-snapshot-check \ + _test-sonar-scan + +### * build | Build python package +.PHONY: build +build: \ + _helper-init-snapshot-check \ + _lint-python-setup \ + _build-wheel \ + _deploy-check-dist + +### * deploy | Publish python package +.PHONY: deploy +deploy: \ + _helper-init-snapshot-check \ + _deploy-check-dist \ + _deploy-upload-dist + +### * freeze | Update and lock dependencies +.PHONY: freeze +freeze: \ + _python-pipenv-lock \ + _python-pipenv-gen-requirements + +### * docs-build | Build documentation from sources +.PHONY: docs-build +docs-build: \ + _helper-init-snapshot-check \ + _docs-generate-ref-cli \ + _docs-generate-ref-api \ + _docs-build + +### * docs-server | Start a local server to host documentation +.PHONY: docs-server +docs-server: \ + docs-build \ + _docs-serve + +### * docs-deploy | Publish documentation to GitHub Pages +.PHONY: docs-deploy +docs-deploy: \ + docs-build \ + _docs-publish-gh-pages + +### * tf-init | Initialize terraform +.PHONY: tf-init +tf-init: \ + _helper-init-snapshot-check \ + _tf-create-plugin-cache \ + _tf-init + +### * tf-plan | Run terraform-plan +.PHONY: tf-plan +tf-plan: \ + _helper-init-snapshot-check \ + _tf-clean-planfile \ + _tf-plan + +### * tf-apply | Run terraform-apply +.PHONY: tf-apply +tf-apply: \ + _helper-init-snapshot-check \ + _tf-apply + +### * tf-destroy | Run terraform-destroy +.PHONY: tf-destroy +tf-destroy: \ + _helper-init-snapshot-check \ + _tf-clean-planfile \ + _tf-destroy \ + _tf-workspace-delete \ + _tf-clean-data-dir + +### * clean | Clean repository +.PHONY: clean +clean: \ + _githooks-clean \ + _clean-dist \ + _test-reports-rm \ + _python-pipenv-rm \ + _clean-all-tf-data-dir \ + _clean-empty-dirs \ + _clean-git + +# ----------------------------------------------------------------------------- +# TARGETS - PYTHON DEPENDENCY MANAGEMENT +# ----------------------------------------------------------------------------- + +.PHONY: _python-pipenv-install +_python-pipenv-install: + @pipenv sync --dev + @pipenv clean + @pipenv check || true + +.PHONY: _python-pipenv-lock +_python-pipenv-lock: + @rm -f Pipfile.lock + @pipenv lock --clear --dev + +.PHONY: _python-pipenv-gen-requirements +_python-pipenv-gen-requirements: + @rm -f $(REPOROOT)/requirements.txt + @pipenv requirements > $(REPOROOT)/requirements.txt + +.PHONY: _python-pipenv-rm +_python-pipenv-rm: + @pipenv --rm || true + @pipenv --clear + +# ------------------------------------------------------------------------------ +# TARGETS - FORMATTING +# ------------------------------------------------------------------------------ + +.PHONY: _fmt-python-isort +_fmt-python-isort: + @pipenv run -- isort -- $(py_files_in_repo) + +.PHONY: _fmt-python-black +_fmt-python-black: + @pipenv run -- black -- $(py_files_in_repo) + +.PHONY: _fmt-python-docstrings +_fmt-python-docstrings: + @pipenv run -- docformatter \ + --in-place \ + --recursive \ + --blank \ + --pre-summary-newline \ + --make-summary-multi-line \
+ -- $(py_files_in_repo) + +.PHONY: _fmt-markdown-mdformat +_fmt-markdown-mdformat: + @pipenv run -- mdformat \ + --number \ + --wrap no \ + -- $(md_files_in_repo) + +# ----------------------------------------------------------------------------- +# TARGETS - LINTING +# ----------------------------------------------------------------------------- + +.PHONY: _lint-python-isort +_lint-python-isort: + @pipenv run -- isort --check -- $(py_files_in_repo) + +.PHONY: _lint-python-black +_lint-python-black: + @pipenv run -- black --check -- $(py_files_in_repo) + +# pylint runs multiple times +# We need to do this to support multiple output formats +# run1: For the terminal (developer feedback) +# run2: For sonar compatible messages +# run3: For JSON formatted output, that then produces an HTML +.PHONY: _lint-python-pylint +_lint-python-pylint: + @pipenv run -- pylint \ + --reports=n \ + --output-format=colorized \ + -- $(SRC_DIR) + @pipenv run -- pylint \ + --exit-zero \ + --reports=n \ + --output-format=text \ + --msg-template='{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}' \ + -- $(SRC_DIR) \ + > $(REPORTS_DIR)/pylint-sonar.txt + @pipenv run -- pylint \ + --exit-zero \ + --reports=y \ + --output-format=jsonextended \ + -- $(SRC_DIR) \ + > $(REPORTS_DIR)/pylint.json + @pipenv run -- pylint-json2html \ + --input-format jsonextended \ + --output $(REPORTS_DIR)/pylint.html \ + $(REPORTS_DIR)/pylint.json + +# bandit runs multiple times +# We need to do this to support multiple output formats +# run1: For the terminal (developer feedback) +# run2: For sonar compatible messages +# run3: For HTML report +.PHONY: _lint-python-bandit +_lint-python-bandit: + @pipenv run -- bandit \ + --recursive \ + --quiet \ + --configfile $(REPOROOT)/.bandit.yaml \ + --format screen \ + -- $(SRC_DIR) + @pipenv run -- bandit \ + --recursive \ + --quiet \ + --exit-zero \ + --configfile $(REPOROOT)/.bandit.yaml \ + --format json \ + --output $(REPORTS_DIR)/bandit.json \ + -- $(SRC_DIR) + @pipenv run -- bandit \ + --recursive \ + --quiet \ + --exit-zero \ + --configfile $(REPOROOT)/.bandit.yaml \ + --format html \ + --output $(REPORTS_DIR)/bandit.html \ + -- $(SRC_DIR) + +.PHONY: _lint-python-mypy +_lint-python-mypy: + @pipenv run -- mypy \ + -- $(SRC_DIR) + +.PHONY: _lint-python-docstrings +_lint-python-docstrings: + @pipenv run -- docformatter \ + --check \ + --recursive \ + --blank \ + --pre-summary-newline \ + --make-summary-multi-line \ + -- $(py_files_in_repo) + +.PHONY: _lint-python-setup +_lint-python-setup: + @pipenv run -- \ + python -W ignore -- \ + $(PKG_SETUP) $(SETUP_PY_ARGS) check --strict + +.PHONY: _lint-markdown-mdformat +_lint-markdown-mdformat: + @pipenv run -- mdformat \ + --check \ + --number \ + --wrap no \ + -- $(md_files_in_repo) + +# ----------------------------------------------------------------------------- +# TARGETS - TESTING +# ----------------------------------------------------------------------------- + +.PHONY: _test-reports-mkdir +_test-reports-mkdir: + @mkdir -p $(REPORTS_DIR) + +.PHONY: _test-reports-rm +_test-reports-rm: + @rm -rf $(REPORTS_DIR) + +.PHONY: _test-python-pytest +_test-python-pytest: + @rm -rf $(REPOROOT)/.rdk + @pipenv run -- pytest $(TESTS_UNIT_DIR) + @rm -rf $(REPOROOT)/.rdk + +.PHONY: _test-sonar-scan +_test-sonar-scan: + @if ! command -v sonar-scanner >/dev/null 2>&1; then \ + echo "sonar-scanner is not installed" >&2; \ + exit 1; \ + fi + @if ! 
test -n "$${SONAR_TOKEN}"; then \ + echo "SONAR_TOKEN is not set" >&2; \ + exit 1; \ + fi + @rdk_version=$$(grep 'VERSION.*=.*' \ + rdk/__init__.py \ + | head -1 | cut -d '=' -f2 \ + | tr -d ' ' | tr -d '"' \ + ) \ + && git_branch=$$(git rev-parse --abbrev-ref HEAD) \ + && export SONAR_SCANNER_OPTS="-Xmx512m" \ + && sonar-scanner \ + -Dsonar.login="$${SONAR_TOKEN}" \ + -Dsonar.projectVersion="$${rdk_version}" \ + -Dsonar.branch.name="$${git_branch}" + +# ----------------------------------------------------------------------------- +# TARGETS - BUILD +# ----------------------------------------------------------------------------- + +.PHONY: _build-wheel +_build-wheel: + @pipenv run -- \ + python -W ignore -- \ + $(PKG_SETUP) $(SETUP_PY_ARGS) bdist_wheel \ + --universal \ + --python-tag "py3" \ + --owner "nobody" \ + --group "nobody" + +# ----------------------------------------------------------------------------- +# TARGETS - DEPLOY +# ----------------------------------------------------------------------------- + +.PHONY: _deploy-check-dist +_deploy-check-dist: + @find ./dist -mindepth 1 -maxdepth 1 -type f -print0 \ + | xargs -0 -- pipenv run -- twine check --strict + +.PHONY: _deploy-upload-dist +_deploy-upload-dist: + @find ./dist -mindepth 1 -maxdepth 1 -type f -print0 \ + | xargs -0 -- pipenv run -- twine upload \ + --config-file $(REPOROOT)/twine.pypirc \ + --non-interactive \ + --repository artifactory \ + --verbose \ + -- + +# ------------------------------------------------------------------------------ +# TARGETS - GITHOOKS +# ------------------------------------------------------------------------------ + +.PHONY: _githooks-install +_githooks-install: + @chmod +x $(GITHOOKS_DIR)/bin/* + @git config --local core.hooksPath $(GITHOOKS_DIR)/bin + @git config --local commit.template $(GITHOOKS_DIR)/etc/commit-template + @git config --local fetch.prune true + @git config --local fetch.pruneTags true + @git config --local push.default simple + @git config --local pull.ff true + @git config --local pull.rebase false + @git config --local user.useConfigOnly true + +.PHONY: _githooks-clean +_githooks-clean: + @for c in \ + core.hooksPath \ + commit.template \ + ; do \ + if git config --local --get $$c >/dev/null 2>&1; then \ + git config --local --unset $$c; \ + fi; \ + done + +# ----------------------------------------------------------------------------- +# TARGETS - DOCS +# ----------------------------------------------------------------------------- + +.PHONY: _docs-generate-ref-cli +_docs-generate-ref-cli: + @chmod +x $(TOOLS_DIR)/docs/bin/generate-ref-cli.sh + @pipenv run -- $(TOOLS_DIR)/docs/bin/generate-ref-cli.sh + +.PHONY: _docs-generate-ref-api +_docs-generate-ref-api: + @rm -rf $(DOCS_DIR)/reference/api + @pipenv run -- pdoc -o $(DOCS_DIR)/reference/api rdk.pytest + +.PHONY: _docs-build +_docs-build: + @pipenv run -- mkdocs build + +.PHONY: _docs-serve +_docs-serve: + @pipenv run -- mkdocs serve + +.PHONY: _docs-publish-gh-pages +_docs-publish-gh-pages: + @pipenv run -- mkdocs gh-deploy --message "docs: publish from {sha}" + +# ----------------------------------------------------------------------------- +# TARGETS - CLEAN +# ----------------------------------------------------------------------------- + +.PHONY: _clean-dist +_clean-dist: + @rm -rf ./build ./dist ./*.egg-info + +.PHONY: _clean-git +_clean-git: + @git clean -fdXq + +.PHONY: _clean-empty-dirs +_clean-empty-dirs: + @find $(REPOROOT) -type d -empty -print0 \ + | xargs -0 -- rm -rf + +.PHONY: 
_clean-all-tf-data-dir +_clean-all-tf-data-dir: + @find $(REPOROOT) -type d -name '.terraform' -print0 \ + | xargs -0 -- rm -rf + @find $(REPOROOT) -type d -name '*.terraform' -print0 \ + | xargs -0 -- rm -rf + @find $(REPOROOT) -type f -name 'tfplan' -print0 \ + | xargs -0 -- rm -f + @find $(REPOROOT) -type f -name '*tfplan*' -print0 \ + | xargs -0 -- rm -f + +# ----------------------------------------------------------------------------- +# TARGETS - HELPERS +# ----------------------------------------------------------------------------- + +.PHONY: _helper-init-snapshot-save +_helper-init-snapshot-save: + @$(INIT_SNAPSHOT) save + +.PHONY: _helper-init-snapshot-check +_helper-init-snapshot-check: + @$(INIT_SNAPSHOT) check ; rc=$$?; \ + if [[ "$$rc" -eq 0 ]]; then exit 0; fi; \ + if [[ "$$rc" -eq 1 ]]; then \ + echo "ERROR: Failed to check if repository initialization is required." >&2; \ + exit 1; \ + fi; \ + if [[ "$$rc" -eq 2 ]]; then \ + echo "WARNING: Repository initialization is required. Running now ..." >&2; \ + cd $(REPOROOT) || exit 1; \ + make init || exit 1; \ + fi + +# ----------------------------------------------------------------------------- +# TARGETS - HELP (DEFAULT) +# ----------------------------------------------------------------------------- + +### * help | Prints this message +.PHONY: help +.DEFAULT_GOAL := help +help: + @echo "USAGE: make [target ...]" + @echo + @echo "TARGETS:" + @echo + @grep -E '^###[[:blank:]]*\*[[:blank:]]*' $(lastword $(MAKEFILE_LIST)) \ + | sed -e 's|^###[[:blank:]]*\*[[:blank:]]*| |g' \ + | column -s'|' -t + @echo + +############################################################################### diff --git a/Pipfile b/Pipfile new file mode 100644 index 00000000..a9196efa --- /dev/null +++ b/Pipfile @@ -0,0 +1,57 @@ +[[source]] +name = "pypi" +url = "https://pypi.python.org/simple" +verify_ssl = true + +[dev-packages] +bandit = ">=1,<2" +docformatter = ">=1,<2" +gitlint = "<1" +isort= ">=5,<6" +lxml = ">=4,<5" +mdformat = "<1" +mdformat-beautysh = "<1" +mdformat-black = "<1" +mdformat-config = "<1" +mdformat-frontmatter = "<1" +mdformat-toc = "<1" +mkdocs = ">=1,<2" +mkdocs-material = ">=7,<8" +moto = { extras = ["cloudwatch", "sts", "s3"], version = ">=2,<3" } +mypy = "<1" +pdoc = ">=7,<8" +pylint = ">=2,<3" +pylint-json2html = "<1" +pylint-pytest = ">=1,<2" +pytest = ">=6,<7" +pytest-asyncio = "<1" +pytest-console-scripts = ">=1,<2" +pytest-cov = ">=2,<3" +pytest-env = "<1" +pytest-httpx = "<1" +pytest-mock = ">=3,<4" +pytest-reportlog = "<1" +twine = ">=3,<4" +types-aiofiles = "<1" + +# pinned to full version since we can't specify pre-releases for a single pkg +# see: https://github.com/pypa/pipenv/issues/1760 +black = "==21.7b0" + +# This package +rdk = {editable = true, path = "."} + +[packages] +# NOTE: If you update this, also update setup.py +aiofiles = "<1" +aws-cdk-lib = ">=2" +constructs = ">=10,<11" +# boto3 = ">=1,<2" +colorlog = ">=4,<5" +httpx = "<1" +mergedeep = ">=1,<2" +pytest = ">=6,<7" +semver = ">=2,<3" + +[requires] +python_version = "3.8" diff --git a/Pipfile.lock b/Pipfile.lock new file mode 100644 index 00000000..709fd44a --- /dev/null +++ b/Pipfile.lock @@ -0,0 +1,1977 @@ +{ + "_meta": { + "hash": { + "sha256": "7a6857503771f671fed5b5daf86254a4f9fefdbb5e7ecbe6f68fd144aff8e597" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.8" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.python.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "aiofiles": { + "hashes": [ + 
"sha256:7a973fc22b29e9962d0897805ace5856e6a566ab1f0c8e5c91ff6c866519c937", + "sha256:8334f23235248a3b2e83b2c3a78a22674f39969b96397126cc93664d9a901e59" + ], + "index": "pypi", + "version": "==0.8.0" + }, + "anyio": { + "hashes": [ + "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421", + "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3" + ], + "markers": "python_full_version >= '3.6.2'", + "version": "==3.6.2" + }, + "attrs": { + "hashes": [ + "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836", + "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99" + ], + "markers": "python_version >= '3.6'", + "version": "==22.2.0" + }, + "aws-cdk-lib": { + "hashes": [ + "sha256:0d7001b0f507dcd435c6c20688e61d6c45c297e54bae2bf36256e10520668a8a", + "sha256:a0aeaf0e0d0dcc36fe52a1df09708028a8f71f54116bc3f2afec546b0d90c256" + ], + "index": "pypi", + "version": "==2.72.1" + }, + "aws-cdk.asset-awscli-v1": { + "hashes": [ + "sha256:90adb0f2405e0794607dddab09c7427ca02941655fbfe0164459111d7509b1a3", + "sha256:fa6d42e0d026de4cbb610672acdbfa00db30e763613cfabb0c7e7b6cea63275d" + ], + "markers": "python_version ~= '3.7'", + "version": "==2.2.129" + }, + "aws-cdk.asset-kubectl-v20": { + "hashes": [ + "sha256:9834cdb150c5590aea4e5eba6de2a89b4c60617451181c524810c5a75154565c", + "sha256:a2fad1a5a35a94a465efe60859f91e45dacc33261fb9bbf1cf9bbc6e2f70e9d6" + ], + "markers": "python_version ~= '3.7'", + "version": "==2.1.1" + }, + "aws-cdk.asset-node-proxy-agent-v5": { + "hashes": [ + "sha256:31ef1c6e49ca1baaa4e32b1b4f1ba6c1f493939741387ec30fe581aebc2a18ae", + "sha256:d3c5402148ec2964b6faec33043760182c7434e3e000369363c265c01717cd0c" + ], + "markers": "python_version ~= '3.7'", + "version": "==2.0.105" + }, + "cattrs": { + "hashes": [ + "sha256:bc12b1f0d000b9f9bee83335887d532a1d3e99a833d1bf0882151c97d3e68c21", + "sha256:f0eed5642399423cf656e7b66ce92cdc5b963ecafd041d1b24d136fdde7acf6d" + ], + "markers": "python_version >= '3.7'", + "version": "==22.2.0" + }, + "certifi": { + "hashes": [ + "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3", + "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18" + ], + "markers": "python_version >= '3.6'", + "version": "==2022.12.7" + }, + "colorlog": { + "hashes": [ + "sha256:3dd15cb27e8119a24c1a7b5c93f9f3b455855e0f73993b1c25921b2f646f1dcd", + "sha256:59b53160c60902c405cdec28d38356e09d40686659048893e026ecbd589516b1" + ], + "index": "pypi", + "version": "==4.8.0" + }, + "constructs": { + "hashes": [ + "sha256:94e8f2dc238e30129013a808f23a109a4c5749b94616a8c51c2597ce49bd623c", + "sha256:e99390593511ceec1964beeab0977c6df4ca4dbdf0bf17dc3391471fb202e9fb" + ], + "index": "pypi", + "version": "==10.1.302" + }, + "exceptiongroup": { + "hashes": [ + "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e", + "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785" + ], + "markers": "python_version < '3.11'", + "version": "==1.1.1" + }, + "h11": { + "hashes": [ + "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", + "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761" + ], + "markers": "python_version >= '3.7'", + "version": "==0.14.0" + }, + "httpcore": { + "hashes": [ + "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb", + "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0" + ], + "markers": "python_version >= '3.7'", + "version": "==0.16.3" + }, + 
"httpx": { + "hashes": [ + "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9", + "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6" + ], + "index": "pypi", + "version": "==0.23.3" + }, + "idna": { + "hashes": [ + "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", + "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" + ], + "version": "==3.4" + }, + "importlib-resources": { + "hashes": [ + "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6", + "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a" + ], + "markers": "python_version >= '3.7'", + "version": "==5.12.0" + }, + "iniconfig": { + "hashes": [ + "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", + "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" + ], + "markers": "python_version >= '3.7'", + "version": "==2.0.0" + }, + "jsii": { + "hashes": [ + "sha256:4da63ab99f2696cd063574460c94221f0a7de9d345e71dfb19dfbcecf8ca8355", + "sha256:ea3cace063f6a47cdf0a74c929618d779efab426fedb7692a8ac1b9b29797f8c" + ], + "markers": "python_version ~= '3.7'", + "version": "==1.80.0" + }, + "mergedeep": { + "hashes": [ + "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8", + "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307" + ], + "index": "pypi", + "version": "==1.3.4" + }, + "packaging": { + "hashes": [ + "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2", + "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97" + ], + "markers": "python_version >= '3.7'", + "version": "==23.0" + }, + "pluggy": { + "hashes": [ + "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159", + "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3" + ], + "markers": "python_version >= '3.6'", + "version": "==1.0.0" + }, + "publication": { + "hashes": [ + "sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6", + "sha256:68416a0de76dddcdd2930d1c8ef853a743cc96c82416c4e4d3b5d901c6276dc4" + ], + "version": "==0.0.3" + }, + "py": { + "hashes": [ + "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", + "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==1.11.0" + }, + "pytest": { + "hashes": [ + "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89", + "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134" + ], + "index": "pypi", + "version": "==6.2.5" + }, + "python-dateutil": { + "hashes": [ + "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", + "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==2.8.2" + }, + "rfc3986": { + "extras": [ + "idna2008" + ], + "hashes": [ + "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835", + "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97" + ], + "version": "==1.5.0" + }, + "semver": { + "hashes": [ + "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4", + "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f" + ], + "index": "pypi", + "version": "==2.13.0" + }, + "six": { + "hashes": [ + 
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==1.16.0" + }, + "sniffio": { + "hashes": [ + "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101", + "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384" + ], + "markers": "python_version >= '3.7'", + "version": "==1.3.0" + }, + "toml": { + "hashes": [ + "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", + "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" + ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", + "version": "==0.10.2" + }, + "typeguard": { + "hashes": [ + "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4", + "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1" + ], + "markers": "python_full_version >= '3.5.3'", + "version": "==2.13.3" + }, + "typing-extensions": { + "hashes": [ + "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb", + "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4" + ], + "markers": "python_version >= '3.7'", + "version": "==4.5.0" + }, + "zipp": { + "hashes": [ + "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b", + "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556" + ], + "markers": "python_version < '3.10'", + "version": "==3.15.0" + } + }, + "develop": { + "aiofiles": { + "hashes": [ + "sha256:7a973fc22b29e9962d0897805ace5856e6a566ab1f0c8e5c91ff6c866519c937", + "sha256:8334f23235248a3b2e83b2c3a78a22674f39969b96397126cc93664d9a901e59" + ], + "index": "pypi", + "version": "==0.8.0" + }, + "anyio": { + "hashes": [ + "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421", + "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3" + ], + "markers": "python_full_version >= '3.6.2'", + "version": "==3.6.2" + }, + "appdirs": { + "hashes": [ + "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", + "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128" + ], + "version": "==1.4.4" + }, + "arrow": { + "hashes": [ + "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1", + "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2" + ], + "markers": "python_version >= '3.6'", + "version": "==1.2.3" + }, + "astroid": { + "hashes": [ + "sha256:6e61b85c891ec53b07471aec5878f4ac6446a41e590ede0f2ce095f39f7d49dd", + "sha256:dea89d9f99f491c66ac9c04ebddf91e4acf8bd711722175fe6245c0725cc19bb" + ], + "markers": "python_full_version >= '3.7.2'", + "version": "==2.15.2" + }, + "astunparse": { + "hashes": [ + "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872", + "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8" + ], + "markers": "python_version < '3.9'", + "version": "==1.6.3" + }, + "attrs": { + "hashes": [ + "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836", + "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99" + ], + "markers": "python_version >= '3.6'", + "version": "==22.2.0" + }, + "aws-cdk-lib": { + "hashes": [ + "sha256:0d7001b0f507dcd435c6c20688e61d6c45c297e54bae2bf36256e10520668a8a", + "sha256:a0aeaf0e0d0dcc36fe52a1df09708028a8f71f54116bc3f2afec546b0d90c256" + ], + 
"index": "pypi", + "version": "==2.72.1" + }, + "aws-cdk.asset-awscli-v1": { + "hashes": [ + "sha256:90adb0f2405e0794607dddab09c7427ca02941655fbfe0164459111d7509b1a3", + "sha256:fa6d42e0d026de4cbb610672acdbfa00db30e763613cfabb0c7e7b6cea63275d" + ], + "markers": "python_version ~= '3.7'", + "version": "==2.2.129" + }, + "aws-cdk.asset-kubectl-v20": { + "hashes": [ + "sha256:9834cdb150c5590aea4e5eba6de2a89b4c60617451181c524810c5a75154565c", + "sha256:a2fad1a5a35a94a465efe60859f91e45dacc33261fb9bbf1cf9bbc6e2f70e9d6" + ], + "markers": "python_version ~= '3.7'", + "version": "==2.1.1" + }, + "aws-cdk.asset-node-proxy-agent-v5": { + "hashes": [ + "sha256:31ef1c6e49ca1baaa4e32b1b4f1ba6c1f493939741387ec30fe581aebc2a18ae", + "sha256:d3c5402148ec2964b6faec33043760182c7434e3e000369363c265c01717cd0c" + ], + "markers": "python_version ~= '3.7'", + "version": "==2.0.105" + }, + "bandit": { + "hashes": [ + "sha256:75665181dc1e0096369112541a056c59d1c5f66f9bb74a8d686c3c362b83f549", + "sha256:bdfc739baa03b880c2d15d0431b31c658ffc348e907fe197e54e0389dd59e11e" + ], + "index": "pypi", + "version": "==1.7.5" + }, + "beautysh": { + "hashes": [ + "sha256:423e0c87cccf2af21cae9a75e04e0a42bc6ce28469c001ee8730242e10a45acd", + "sha256:8c7d9c4f2bd02c089194218238b7ecc78879506326b301eba1d5f49471a55bac" + ], + "markers": "python_full_version >= '3.6.2' and python_full_version < '4.0.0'", + "version": "==6.2.1" + }, + "black": { + "hashes": [ + "sha256:1c7aa6ada8ee864db745b22790a32f94b2795c253a75d6d9b5e439ff10d23116", + "sha256:c8373c6491de9362e39271630b65b964607bc5c79c83783547d76c839b3aa219" + ], + "index": "pypi", + "version": "==21.7b0" + }, + "bleach": { + "hashes": [ + "sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414", + "sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4" + ], + "markers": "python_version >= '3.7'", + "version": "==6.0.0" + }, + "boto3": { + "hashes": [ + "sha256:816a198a6cc4f283af6b21439d85be6dbe4b73c2232dd906c6bafb4fece28d19", + "sha256:9de90a2c0b853f84436b032b28947fc8a765dc462573a8d543b13f16c6579b40" + ], + "markers": "python_version >= '3.7'", + "version": "==1.26.107" + }, + "botocore": { + "hashes": [ + "sha256:ee1e43e6cd0864cc6811ba3f05123647612ee3f07a286a4c94f5885aa86d6922", + "sha256:f63942b4b7248c0b3d6ecbc2852cf0787c23ace2a91a012f7ee0b3ae3eb08f4f" + ], + "markers": "python_version >= '3.7'", + "version": "==1.29.107" + }, + "cattrs": { + "hashes": [ + "sha256:bc12b1f0d000b9f9bee83335887d532a1d3e99a833d1bf0882151c97d3e68c21", + "sha256:f0eed5642399423cf656e7b66ce92cdc5b963ecafd041d1b24d136fdde7acf6d" + ], + "markers": "python_version >= '3.7'", + "version": "==22.2.0" + }, + "certifi": { + "hashes": [ + "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3", + "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18" + ], + "markers": "python_version >= '3.6'", + "version": "==2022.12.7" + }, + "cffi": { + "hashes": [ + "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5", + "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef", + "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104", + "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426", + "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405", + "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375", + "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a", + 
"sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e", + "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc", + "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf", + "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185", + "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497", + "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3", + "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35", + "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c", + "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83", + "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21", + "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca", + "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984", + "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac", + "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd", + "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee", + "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a", + "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2", + "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192", + "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7", + "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585", + "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f", + "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e", + "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27", + "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b", + "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e", + "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e", + "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d", + "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c", + "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415", + "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82", + "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02", + "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314", + "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325", + "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c", + "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3", + "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914", + "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045", + "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d", + "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9", + "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5", + "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2", + "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c", + "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3", + "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2", + "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8", + "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d", + 
"sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d", + "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9", + "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162", + "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76", + "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4", + "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e", + "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9", + "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6", + "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b", + "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01", + "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0" + ], + "version": "==1.15.1" + }, + "charset-normalizer": { + "hashes": [ + "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6", + "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1", + "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e", + "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373", + "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62", + "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230", + "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be", + "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c", + "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0", + "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448", + "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f", + "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649", + "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d", + "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0", + "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706", + "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a", + "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59", + "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23", + "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5", + "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb", + "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e", + "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e", + "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c", + "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28", + "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d", + "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41", + "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974", + "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce", + "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f", + "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1", + "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d", + "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8", + "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017", + "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31", + 
"sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7", + "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8", + "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e", + "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14", + "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd", + "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d", + "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795", + "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b", + "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b", + "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b", + "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203", + "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f", + "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19", + "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1", + "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a", + "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac", + "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9", + "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0", + "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137", + "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f", + "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6", + "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5", + "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909", + "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f", + "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0", + "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324", + "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755", + "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb", + "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854", + "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c", + "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60", + "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84", + "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0", + "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b", + "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1", + "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531", + "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1", + "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11", + "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326", + "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df", + "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab" + ], + "markers": "python_full_version >= '3.7.0'", + "version": "==3.1.0" + }, + "click": { + "hashes": [ + "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", + "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48" + ], + "markers": "python_version >= '3.7'", + "version": "==8.1.3" + }, + "colorama": { + "hashes": [ + 
"sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", + "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'", + "version": "==0.4.6" + }, + "colorlog": { + "hashes": [ + "sha256:3dd15cb27e8119a24c1a7b5c93f9f3b455855e0f73993b1c25921b2f646f1dcd", + "sha256:59b53160c60902c405cdec28d38356e09d40686659048893e026ecbd589516b1" + ], + "index": "pypi", + "version": "==4.8.0" + }, + "constructs": { + "hashes": [ + "sha256:94e8f2dc238e30129013a808f23a109a4c5749b94616a8c51c2597ce49bd623c", + "sha256:e99390593511ceec1964beeab0977c6df4ca4dbdf0bf17dc3391471fb202e9fb" + ], + "index": "pypi", + "version": "==10.1.302" + }, + "coverage": { + "hashes": [ + "sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d", + "sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4", + "sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e", + "sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab", + "sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90", + "sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6", + "sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731", + "sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540", + "sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2", + "sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292", + "sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5", + "sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b", + "sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2", + "sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0", + "sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57", + "sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3", + "sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140", + "sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84", + "sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988", + "sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67", + "sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d", + "sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2", + "sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5", + "sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9", + "sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8", + "sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd", + "sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6", + "sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be", + "sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88", + "sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25", + "sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137", + "sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968", + "sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9", + "sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef", + "sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54", + 
"sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512", + "sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005", + "sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f", + "sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149", + "sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d", + "sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8", + "sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7", + "sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5", + "sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016", + "sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69", + "sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212", + "sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc", + "sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8", + "sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d", + "sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd", + "sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169" + ], + "markers": "python_version >= '3.7'", + "version": "==7.2.2" + }, + "cryptography": { + "hashes": [ + "sha256:0a4e3406cfed6b1f6d6e87ed243363652b2586b2d917b0609ca4f97072994405", + "sha256:1e0af458515d5e4028aad75f3bb3fe7a31e46ad920648cd59b64d3da842e4356", + "sha256:2803f2f8b1e95f614419926c7e6f55d828afc614ca5ed61543877ae668cc3472", + "sha256:28d63d75bf7ae4045b10de5413fb1d6338616e79015999ad9cf6fc538f772d41", + "sha256:32057d3d0ab7d4453778367ca43e99ddb711770477c4f072a51b3ca69602780a", + "sha256:3a4805a4ca729d65570a1b7cac84eac1e431085d40387b7d3bbaa47e39890b88", + "sha256:63dac2d25c47f12a7b8aa60e528bfb3c51c5a6c5a9f7c86987909c6c79765554", + "sha256:650883cc064297ef3676b1db1b7b1df6081794c4ada96fa457253c4cc40f97db", + "sha256:6f2bbd72f717ce33100e6467572abaedc61f1acb87b8d546001328d7f466b778", + "sha256:7c872413353c70e0263a9368c4993710070e70ab3e5318d85510cc91cce77e7c", + "sha256:918cb89086c7d98b1b86b9fdb70c712e5a9325ba6f7d7cfb509e784e0cfc6917", + "sha256:9618a87212cb5200500e304e43691111570e1f10ec3f35569fdfcd17e28fd797", + "sha256:a805a7bce4a77d51696410005b3e85ae2839bad9aa38894afc0aa99d8e0c3160", + "sha256:cc3a621076d824d75ab1e1e530e66e7e8564e357dd723f2533225d40fe35c60c", + "sha256:cd033d74067d8928ef00a6b1327c8ea0452523967ca4463666eeba65ca350d4c", + "sha256:cf91e428c51ef692b82ce786583e214f58392399cf65c341bc7301d096fa3ba2", + "sha256:d36bbeb99704aabefdca5aee4eba04455d7a27ceabd16f3b3ba9bdcc31da86c4", + "sha256:d8aa3609d337ad85e4eb9bb0f8bcf6e4409bfb86e706efa9a027912169e89122", + "sha256:f5d7b79fa56bc29580faafc2ff736ce05ba31feaa9d4735048b0de7d9ceb2b94" + ], + "markers": "python_version >= '3.6'", + "version": "==40.0.1" + }, + "dill": { + "hashes": [ + "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0", + "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373" + ], + "markers": "python_version < '3.11'", + "version": "==0.3.6" + }, + "docformatter": { + "hashes": [ + "sha256:2346dcc239b73bc4b62d1035e240d1338d154fb047a7e492f0168a93744222e2", + "sha256:dfad58437b560708eb74ccaccba5c91a0f98f534ed51b7af02aa35225e9eb6c2" + ], + "index": "pypi", + "version": "==1.6.0" + }, + "docutils": { + "hashes": [ + "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6", + "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc" + ], + 
"markers": "python_version >= '3.7'", + "version": "==0.19" + }, + "exceptiongroup": { + "hashes": [ + "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e", + "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785" + ], + "markers": "python_version < '3.11'", + "version": "==1.1.1" + }, + "ghp-import": { + "hashes": [ + "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619", + "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343" + ], + "version": "==2.1.0" + }, + "gitdb": { + "hashes": [ + "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a", + "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7" + ], + "markers": "python_version >= '3.7'", + "version": "==4.0.10" + }, + "gitlint": { + "hashes": [ + "sha256:26bb085959148d99fbbc178b4e56fda6c3edd7646b7c2a24d8ee1f8e036ed85d", + "sha256:b5b70fb894e80849b69abbb65ee7dbb3520fc3511f202a6e6b6ddf1a71ee8f61" + ], + "index": "pypi", + "version": "==0.19.1" + }, + "gitlint-core": { + "extras": [ + "trusted-deps" + ], + "hashes": [ + "sha256:7bf977b03ff581624a9e03f65ebb8502cc12dfaa3e92d23e8b2b54bbdaa29992", + "sha256:f41effd1dcbc06ffbfc56b6888cce72241796f517b46bd9fd4ab1b145056988c" + ], + "markers": "python_version >= '3.7'", + "version": "==0.19.1" + }, + "gitpython": { + "hashes": [ + "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573", + "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d" + ], + "markers": "python_version >= '3.7'", + "version": "==3.1.31" + }, + "h11": { + "hashes": [ + "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", + "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761" + ], + "markers": "python_version >= '3.7'", + "version": "==0.14.0" + }, + "httpcore": { + "hashes": [ + "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb", + "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0" + ], + "markers": "python_version >= '3.7'", + "version": "==0.16.3" + }, + "httpx": { + "hashes": [ + "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9", + "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6" + ], + "index": "pypi", + "version": "==0.23.3" + }, + "idna": { + "hashes": [ + "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", + "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" + ], + "version": "==3.4" + }, + "importlib-metadata": { + "hashes": [ + "sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20", + "sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09" + ], + "markers": "python_version < '3.10'", + "version": "==6.1.0" + }, + "importlib-resources": { + "hashes": [ + "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6", + "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a" + ], + "markers": "python_version >= '3.7'", + "version": "==5.12.0" + }, + "iniconfig": { + "hashes": [ + "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", + "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" + ], + "markers": "python_version >= '3.7'", + "version": "==2.0.0" + }, + "isort": { + "hashes": [ + "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504", + "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6" + ], + "index": "pypi", + "version": 
"==5.12.0" + }, + "jaraco.classes": { + "hashes": [ + "sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158", + "sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a" + ], + "markers": "python_version >= '3.7'", + "version": "==3.2.3" + }, + "jinja2": { + "hashes": [ + "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852", + "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61" + ], + "markers": "python_version >= '3.7'", + "version": "==3.1.2" + }, + "jmespath": { + "hashes": [ + "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", + "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" + ], + "markers": "python_version >= '3.7'", + "version": "==1.0.1" + }, + "jsii": { + "hashes": [ + "sha256:4da63ab99f2696cd063574460c94221f0a7de9d345e71dfb19dfbcecf8ca8355", + "sha256:ea3cace063f6a47cdf0a74c929618d779efab426fedb7692a8ac1b9b29797f8c" + ], + "markers": "python_version ~= '3.7'", + "version": "==1.80.0" + }, + "keyring": { + "hashes": [ + "sha256:771ed2a91909389ed6148631de678f82ddc73737d85a927f382a8a1b157898cd", + "sha256:ba2e15a9b35e21908d0aaf4e0a47acc52d6ae33444df0da2b49d41a46ef6d678" + ], + "markers": "python_version >= '3.7'", + "version": "==23.13.1" + }, + "lazy-object-proxy": { + "hashes": [ + "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382", + "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82", + "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9", + "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494", + "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46", + "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30", + "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63", + "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4", + "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae", + "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be", + "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701", + "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd", + "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006", + "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a", + "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586", + "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8", + "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821", + "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07", + "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b", + "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171", + "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b", + "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2", + "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7", + "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4", + "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8", + "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e", + "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f", + "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda", + 
"sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4", + "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e", + "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671", + "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11", + "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455", + "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734", + "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb", + "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59" + ], + "markers": "python_version >= '3.7'", + "version": "==1.9.0" + }, + "lxml": { + "hashes": [ + "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7", + "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726", + "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03", + "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140", + "sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a", + "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05", + "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03", + "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419", + "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4", + "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e", + "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67", + "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50", + "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894", + "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf", + "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947", + "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1", + "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd", + "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3", + "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92", + "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3", + "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457", + "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74", + "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf", + "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1", + "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4", + "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975", + "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5", + "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe", + "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7", + "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1", + "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2", + "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409", + "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f", + "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f", + "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5", + "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24", + 
"sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e", + "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4", + "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a", + "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c", + "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de", + "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f", + "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b", + "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5", + "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7", + "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a", + "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c", + "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9", + "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e", + "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab", + "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941", + "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5", + "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45", + "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7", + "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892", + "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746", + "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c", + "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53", + "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe", + "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184", + "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38", + "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df", + "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9", + "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b", + "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2", + "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0", + "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda", + "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b", + "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5", + "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380", + "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33", + "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8", + "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1", + "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889", + "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9", + "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f", + "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c" + ], + "index": "pypi", + "version": "==4.9.2" + }, + "markdown": { + "hashes": [ + "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874", + "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621" + ], + "markers": "python_version >= '3.6'", + "version": "==3.3.7" + }, + "markdown-it-py": { + "hashes": [ + 
"sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30", + "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1" + ], + "markers": "python_version >= '3.7'", + "version": "==2.2.0" + }, + "markupsafe": { + "hashes": [ + "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed", + "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc", + "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2", + "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460", + "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7", + "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0", + "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1", + "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa", + "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03", + "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323", + "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65", + "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013", + "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036", + "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f", + "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4", + "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419", + "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2", + "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619", + "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a", + "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a", + "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd", + "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7", + "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666", + "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65", + "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859", + "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625", + "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff", + "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156", + "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd", + "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba", + "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f", + "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1", + "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094", + "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a", + "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513", + "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed", + "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d", + "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3", + "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147", + "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c", + "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603", + "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601", + 
"sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a", + "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1", + "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d", + "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3", + "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54", + "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2", + "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6", + "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58" + ], + "markers": "python_version >= '3.7'", + "version": "==2.1.2" + }, + "mccabe": { + "hashes": [ + "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", + "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" + ], + "markers": "python_version >= '3.6'", + "version": "==0.7.0" + }, + "mdformat": { + "hashes": [ + "sha256:76398d03baa394f331fb560fd0aed8257cf77b65b1c8146b92d395af16253662", + "sha256:99b105033207d2ab70ba1ced8e07327ed4ef1e0a6bc1c7c00207ea73ab502782" + ], + "index": "pypi", + "version": "==0.7.16" + }, + "mdformat-beautysh": { + "hashes": [ + "sha256:23e52dc93ce4cdee12033766a6146f656c72095f74bc80a2bdfde1974a50ee72", + "sha256:b63a0e3adfc29238917b5d163483952f9326f7737cbe3137d3c6c512daf70789" + ], + "index": "pypi", + "version": "==0.1.1" + }, + "mdformat-black": { + "hashes": [ + "sha256:57cad92aee314b87dee52a795cdb52469ab166589d2771e3ad3dd19db907ab62", + "sha256:92a7f83779428ca04e939fcbd196ef6340bf9c83f33b2c50d4d3bb734a63d7b2" + ], + "index": "pypi", + "version": "==0.1.1" + }, + "mdformat-config": { + "hashes": [ + "sha256:0af65deef832886e9a47df126d1a083f278e47ca631fc5f5750adfd7b84239c4", + "sha256:a3f99eaf9970ee473be8cb459e323545781183327cae9bf070d2d3fb421dfd13" + ], + "index": "pypi", + "version": "==0.1.3" + }, + "mdformat-frontmatter": { + "hashes": [ + "sha256:15d3eed1543849d4fe72b1f75b8dffd8b49750c5149186591a1b9617178e2aa2", + "sha256:9c13f6b7a53de7b401af3c95e66735237545bd174e6619392153b296135ffd49" + ], + "index": "pypi", + "version": "==0.4.1" + }, + "mdformat-toc": { + "hashes": [ + "sha256:49d1f47d563f47405f3c165c6a4c30e8404a39f56ae254a27c2a90dd7eae1849", + "sha256:e8735f7517068f274b58b83407491b75445dc938473a3d5fa6467c0db0142daa" + ], + "index": "pypi", + "version": "==0.3.0" + }, + "mdit-py-plugins": { + "hashes": [ + "sha256:ca9a0714ea59a24b2b044a1831f48d817dd0c817e84339f20e7889f392d77c4e", + "sha256:eee0adc7195e5827e17e02d2a258a2ba159944a0748f59c5099a4a27f78fcf6a" + ], + "markers": "python_version >= '3.7'", + "version": "==0.3.5" + }, + "mdurl": { + "hashes": [ + "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", + "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba" + ], + "markers": "python_version >= '3.7'", + "version": "==0.1.2" + }, + "mergedeep": { + "hashes": [ + "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8", + "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307" + ], + "index": "pypi", + "version": "==1.3.4" + }, + "mkdocs": { + "hashes": [ + "sha256:8947af423a6d0facf41ea1195b8e1e8c85ad94ac95ae307fe11232e0424b11c5", + "sha256:c8856a832c1e56702577023cd64cc5f84948280c1c0fcc6af4cd39006ea6aa8c" + ], + "index": "pypi", + "version": "==1.4.2" + }, + "mkdocs-material": { + "hashes": [ + "sha256:1b1dbd8ef2508b358d93af55a5c5db3f141c95667fad802301ec621c40c7c217", + 
"sha256:1b6b3e9e09f922c2d7f1160fe15c8f43d4adc0d6fb81aa6ff0cbc7ef5b78ec75" + ], + "index": "pypi", + "version": "==7.3.6" + }, + "mkdocs-material-extensions": { + "hashes": [ + "sha256:9c003da71e2cc2493d910237448c672e00cefc800d3d6ae93d2fc69979e3bd93", + "sha256:e41d9f38e4798b6617ad98ca8f7f1157b1e4385ac1459ca1e4ea219b556df945" + ], + "markers": "python_version >= '3.7'", + "version": "==1.1.1" + }, + "more-itertools": { + "hashes": [ + "sha256:cabaa341ad0389ea83c17a94566a53ae4c9d07349861ecb14dc6d0345cf9ac5d", + "sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3" + ], + "markers": "python_version >= '3.7'", + "version": "==9.1.0" + }, + "moto": { + "extras": [ + "cloudwatch", + "s3", + "sts" + ], + "hashes": [ + "sha256:0c29f5813d4db69b2f99c5538909a5aba0ba1cb91a74c19eddd9bfdc39ed2ff3", + "sha256:eaaed229742adbd1387383d113350ecd9222fc1e8f5611a9395a058c1eee4377" + ], + "index": "pypi", + "version": "==2.3.2" + }, + "mypy": { + "hashes": [ + "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d", + "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6", + "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf", + "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f", + "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813", + "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33", + "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad", + "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05", + "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297", + "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06", + "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd", + "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243", + "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305", + "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476", + "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711", + "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70", + "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5", + "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461", + "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab", + "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c", + "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d", + "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135", + "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93", + "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648", + "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a", + "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb", + "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3", + "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372", + "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb", + "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef" + ], + "index": "pypi", + "version": "==0.991" + }, + "mypy-extensions": { + "hashes": [ + "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", + "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782" + ], + "markers": "python_version 
>= '3.5'", + "version": "==1.0.0" + }, + "packaging": { + "hashes": [ + "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2", + "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97" + ], + "markers": "python_version >= '3.7'", + "version": "==23.0" + }, + "pathspec": { + "hashes": [ + "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687", + "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293" + ], + "markers": "python_version >= '3.7'", + "version": "==0.11.1" + }, + "pbr": { + "hashes": [ + "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b", + "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3" + ], + "markers": "python_version >= '2.6'", + "version": "==5.11.1" + }, + "pdoc": { + "hashes": [ + "sha256:681a2f243e4ca51bedd0645c2d18275b8b83444e9b6e42b502882ec45369e679" + ], + "index": "pypi", + "version": "==7.4.0" + }, + "pkginfo": { + "hashes": [ + "sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546", + "sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046" + ], + "markers": "python_version >= '3.6'", + "version": "==1.9.6" + }, + "platformdirs": { + "hashes": [ + "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08", + "sha256:ebe11c0d7a805086e99506aa331612429a72ca7cd52a1f0d277dc4adc20cb10e" + ], + "markers": "python_version >= '3.7'", + "version": "==3.2.0" + }, + "pluggy": { + "hashes": [ + "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159", + "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3" + ], + "markers": "python_version >= '3.6'", + "version": "==1.0.0" + }, + "publication": { + "hashes": [ + "sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6", + "sha256:68416a0de76dddcdd2930d1c8ef853a743cc96c82416c4e4d3b5d901c6276dc4" + ], + "version": "==0.0.3" + }, + "py": { + "hashes": [ + "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", + "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==1.11.0" + }, + "pycparser": { + "hashes": [ + "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9", + "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206" + ], + "version": "==2.21" + }, + "pygments": { + "hashes": [ + "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297", + "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717" + ], + "markers": "python_version >= '3.6'", + "version": "==2.14.0" + }, + "pylint": { + "hashes": [ + "sha256:001cc91366a7df2970941d7e6bbefcbf98694e00102c1f121c531a814ddc2ea8", + "sha256:1b647da5249e7c279118f657ca28b6aaebb299f86bf92affc632acf199f7adbb" + ], + "index": "pypi", + "version": "==2.17.2" + }, + "pylint-json2html": { + "hashes": [ + "sha256:65f37c5289ff05f998251487519f58c4fc9b52cd1fc09e8fc5ad75fff7aacfd6", + "sha256:79e681b6df76bb0b3d1a0c753cd3286d243de50905b4a9b63d0f17e5713dee7c" + ], + "index": "pypi", + "version": "==0.4.0" + }, + "pylint-pytest": { + "hashes": [ + "sha256:fb20ef318081cee3d5febc631a7b9c40fa356b05e4f769d6e60a337e58c8879b" + ], + "index": "pypi", + "version": "==1.1.2" + }, + "pymdown-extensions": { + "hashes": [ + "sha256:31eaa76ce6f96aabfcea98787c2fff2c5c0611b20a53a94213970cfbf05f02b8", + "sha256:562c38eee4ce3f101ce631b804bfc2177a8a76c7e4dc908871fb6741a90257a7" 
+ ], + "markers": "python_version >= '3.7'", + "version": "==9.10" + }, + "pytest": { + "hashes": [ + "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89", + "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134" + ], + "index": "pypi", + "version": "==6.2.5" + }, + "pytest-asyncio": { + "hashes": [ + "sha256:83cbf01169ce3e8eb71c6c278ccb0574d1a7a3bb8eaaf5e50e0ad342afb33b36", + "sha256:f129998b209d04fcc65c96fc85c11e5316738358909a8399e93be553d7656442" + ], + "index": "pypi", + "version": "==0.20.3" + }, + "pytest-console-scripts": { + "hashes": [ + "sha256:21063b2e32df96da51412116e654babb1447a415929158d81d445667de9ea946", + "sha256:5c6c3daae9cf9fbed0e655072128938600193dc002a5cf1b187248644ba02857" + ], + "index": "pypi", + "version": "==1.3.1" + }, + "pytest-cov": { + "hashes": [ + "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a", + "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7" + ], + "index": "pypi", + "version": "==2.12.1" + }, + "pytest-env": { + "hashes": [ + "sha256:7e94956aef7f2764f3c147d216ce066bf6c42948bb9e293169b1b1c880a580c2" + ], + "index": "pypi", + "version": "==0.6.2" + }, + "pytest-httpx": { + "hashes": [ + "sha256:50b52b910f6f6cfb0aa65039d6f5bedb6ae3a0c02a98c4a7187543fe437c428a", + "sha256:edcb62baceffbd57753c1a7afc4656b0e71e91c7a512e143c0adbac762d979c1" + ], + "index": "pypi", + "version": "==0.21.3" + }, + "pytest-mock": { + "hashes": [ + "sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b", + "sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f" + ], + "index": "pypi", + "version": "==3.10.0" + }, + "pytest-reportlog": { + "hashes": [ + "sha256:65ac38cb5af90470df3dde6c03a6dd88090913d16765ee54d135279b5579c113", + "sha256:df59f7f1fcd9a0388e39b30e5aa264a609e64953e116f3ea6eb3aab22e3658e6" + ], + "index": "pypi", + "version": "==0.2.1" + }, + "python-dateutil": { + "hashes": [ + "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", + "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==2.8.2" + }, + "pytz": { + "hashes": [ + "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588", + "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb" + ], + "version": "==2023.3" + }, + "pyyaml": { + "hashes": [ + "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf", + "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293", + "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b", + "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57", + "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b", + "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4", + "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07", + "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba", + "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9", + "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287", + "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513", + "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0", + "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782", + "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0", + 
"sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92", + "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f", + "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2", + "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc", + "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1", + "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c", + "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86", + "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4", + "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c", + "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34", + "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b", + "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d", + "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c", + "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb", + "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7", + "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737", + "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3", + "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d", + "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358", + "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53", + "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78", + "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803", + "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a", + "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f", + "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174", + "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5" + ], + "markers": "python_version >= '3.6'", + "version": "==6.0" + }, + "pyyaml-env-tag": { + "hashes": [ + "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb", + "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069" + ], + "markers": "python_version >= '3.6'", + "version": "==0.1" + }, + "rdk": { + "editable": true, + "path": "." 
+ }, + "readme-renderer": { + "hashes": [ + "sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273", + "sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343" + ], + "markers": "python_version >= '3.7'", + "version": "==37.3" + }, + "regex": { + "hashes": [ + "sha256:086afe222d58b88b62847bdbd92079b4699350b4acab892f88a935db5707c790", + "sha256:0b8eb1e3bca6b48dc721818a60ae83b8264d4089a4a41d62be6d05316ec38e15", + "sha256:11d00c31aeab9a6e0503bc77e73ed9f4527b3984279d997eb145d7c7be6268fd", + "sha256:11d1f2b7a0696dc0310de0efb51b1f4d813ad4401fe368e83c0c62f344429f98", + "sha256:1b1fc2632c01f42e06173d8dd9bb2e74ab9b0afa1d698058c867288d2c7a31f3", + "sha256:20abe0bdf03630fe92ccafc45a599bca8b3501f48d1de4f7d121153350a2f77d", + "sha256:22720024b90a6ba673a725dcc62e10fb1111b889305d7c6b887ac7466b74bedb", + "sha256:2472428efc4127374f494e570e36b30bb5e6b37d9a754f7667f7073e43b0abdd", + "sha256:25f0532fd0c53e96bad84664171969de9673b4131f2297f1db850d3918d58858", + "sha256:2848bf76673c83314068241c8d5b7fa9ad9bed866c979875a0e84039349e8fa7", + "sha256:37ae17d3be44c0b3f782c28ae9edd8b47c1f1776d4cabe87edc0b98e1f12b021", + "sha256:3cd9f5dd7b821f141d3a6ca0d5d9359b9221e4f051ca3139320adea9f1679691", + "sha256:4479f9e2abc03362df4045b1332d4a2b7885b245a30d4f4b051c4083b97d95d8", + "sha256:4c49552dc938e3588f63f8a78c86f3c9c75301e813bca0bef13bdb4b87ccf364", + "sha256:539dd010dc35af935b32f248099e38447bbffc10b59c2b542bceead2bed5c325", + "sha256:54c3fa855a3f7438149de3211738dd9b5f0c733f48b54ae05aa7fce83d48d858", + "sha256:55ae114da21b7a790b90255ea52d2aa3a0d121a646deb2d3c6a3194e722fc762", + "sha256:5ccfafd98473e007cebf7da10c1411035b7844f0f204015efd050601906dbb53", + "sha256:5fc33b27b1d800fc5b78d7f7d0f287e35079ecabe68e83d46930cf45690e1c8c", + "sha256:6560776ec19c83f3645bbc5db64a7a5816c9d8fb7ed7201c5bcd269323d88072", + "sha256:6572ff287176c0fb96568adb292674b421fa762153ed074d94b1d939ed92c253", + "sha256:6b190a339090e6af25f4a5fd9e77591f6d911cc7b96ecbb2114890b061be0ac1", + "sha256:7304863f3a652dab5e68e6fb1725d05ebab36ec0390676d1736e0571ebb713ef", + "sha256:75f288c60232a5339e0ff2fa05779a5e9c74e9fc085c81e931d4a264501e745b", + "sha256:7868b8f218bf69a2a15402fde08b08712213a1f4b85a156d90473a6fb6b12b09", + "sha256:787954f541ab95d8195d97b0b8cf1dc304424adb1e07365967e656b92b38a699", + "sha256:78ac8dd8e18800bb1f97aad0d73f68916592dddf233b99d2b5cabc562088503a", + "sha256:79e29fd62fa2f597a6754b247356bda14b866131a22444d67f907d6d341e10f3", + "sha256:845a5e2d84389c4ddada1a9b95c055320070f18bb76512608374aca00d22eca8", + "sha256:86b036f401895e854de9fefe061518e78d506d8a919cc250dc3416bca03f6f9a", + "sha256:87d9951f5a538dd1d016bdc0dcae59241d15fa94860964833a54d18197fcd134", + "sha256:8a9c63cde0eaa345795c0fdeb19dc62d22e378c50b0bc67bf4667cd5b482d98b", + "sha256:93f3f1aa608380fe294aa4cb82e2afda07a7598e828d0341e124b8fd9327c715", + "sha256:9bf4a5626f2a0ea006bf81e8963f498a57a47d58907eaa58f4b3e13be68759d8", + "sha256:9d764514d19b4edcc75fd8cb1423448ef393e8b6cbd94f38cab983ab1b75855d", + "sha256:a610e0adfcb0fc84ea25f6ea685e39e74cbcd9245a72a9a7aab85ff755a5ed27", + "sha256:a81c9ec59ca2303acd1ccd7b9ac409f1e478e40e96f8f79b943be476c5fdb8bb", + "sha256:b7006105b10b59971d3b248ad75acc3651c7e4cf54d81694df5a5130a3c3f7ea", + "sha256:c07ce8e9eee878a48ebeb32ee661b49504b85e164b05bebf25420705709fdd31", + "sha256:c125a02d22c555e68f7433bac8449992fa1cead525399f14e47c2d98f2f0e467", + "sha256:c37df2a060cb476d94c047b18572ee2b37c31f831df126c0da3cd9227b39253d", + "sha256:c869260aa62cee21c5eb171a466c0572b5e809213612ef8d495268cd2e34f20d", + 
"sha256:c88e8c226473b5549fe9616980ea7ca09289246cfbdf469241edf4741a620004", + "sha256:cd1671e9d5ac05ce6aa86874dd8dfa048824d1dbe73060851b310c6c1a201a96", + "sha256:cde09c4fdd070772aa2596d97e942eb775a478b32459e042e1be71b739d08b77", + "sha256:cf86b4328c204c3f315074a61bc1c06f8a75a8e102359f18ce99fbcbbf1951f0", + "sha256:d5bbe0e1511b844794a3be43d6c145001626ba9a6c1db8f84bdc724e91131d9d", + "sha256:d895b4c863059a4934d3e874b90998df774644a41b349ebb330f85f11b4ef2c0", + "sha256:db034255e72d2995cf581b14bb3fc9c00bdbe6822b49fcd4eef79e1d5f232618", + "sha256:dbb3f87e15d3dd76996d604af8678316ad2d7d20faa394e92d9394dfd621fd0c", + "sha256:dc80df325b43ffea5cdea2e3eaa97a44f3dd298262b1c7fe9dbb2a9522b956a7", + "sha256:dd7200b4c27b68cf9c9646da01647141c6db09f48cc5b51bc588deaf8e98a797", + "sha256:df45fac182ebc3c494460c644e853515cc24f5ad9da05f8ffb91da891bfee879", + "sha256:e152461e9a0aedec7d37fc66ec0fa635eca984777d3d3c3e36f53bf3d3ceb16e", + "sha256:e2396e0678167f2d0c197da942b0b3fb48fee2f0b5915a0feb84d11b6686afe6", + "sha256:e76b6fc0d8e9efa39100369a9b3379ce35e20f6c75365653cf58d282ad290f6f", + "sha256:ea3c0cb56eadbf4ab2277e7a095676370b3e46dbfc74d5c383bd87b0d6317910", + "sha256:ef3f528fe1cc3d139508fe1b22523745aa77b9d6cb5b0bf277f48788ee0b993f", + "sha256:fdf7ad455f1916b8ea5cdbc482d379f6daf93f3867b4232d14699867a5a13af7", + "sha256:fffe57312a358be6ec6baeb43d253c36e5790e436b7bf5b7a38df360363e88e9" + ], + "markers": "python_version >= '3.8'", + "version": "==2023.3.23" + }, + "requests": { + "hashes": [ + "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa", + "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf" + ], + "markers": "python_version >= '3.7' and python_version < '4'", + "version": "==2.28.2" + }, + "requests-toolbelt": { + "hashes": [ + "sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7", + "sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.10.1" + }, + "responses": { + "hashes": [ + "sha256:8a3a5915713483bf353b6f4079ba8b2a29029d1d1090a503c70b0dc5d9d0c7bd", + "sha256:c4d9aa9fc888188f0c673eff79a8dadbe2e75b7fe879dc80a221a06e0a68138f" + ], + "markers": "python_version >= '3.7'", + "version": "==0.23.1" + }, + "rfc3986": { + "extras": [ + "idna2008" + ], + "hashes": [ + "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835", + "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97" + ], + "version": "==1.5.0" + }, + "rich": { + "hashes": [ + "sha256:540c7d6d26a1178e8e8b37e9ba44573a3cd1464ff6348b99ee7061b95d1c6333", + "sha256:dc84400a9d842b3a9c5ff74addd8eb798d155f36c1c91303888e0a66850d2a15" + ], + "markers": "python_full_version >= '3.7.0'", + "version": "==13.3.3" + }, + "ruamel.yaml": { + "hashes": [ + "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7", + "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af" + ], + "markers": "python_version >= '3'", + "version": "==0.17.21" + }, + "ruamel.yaml.clib": { + "hashes": [ + "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e", + "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3", + "sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5", + "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497", + "sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f", + 
"sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac", + "sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697", + "sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763", + "sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282", + "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94", + "sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1", + "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072", + "sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9", + "sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5", + "sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231", + "sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93", + "sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b", + "sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb", + "sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f", + "sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307", + "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8", + "sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b", + "sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b", + "sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640", + "sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7", + "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a", + "sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71", + "sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8", + "sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122", + "sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7", + "sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80", + "sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e", + "sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab", + "sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0", + "sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646", + "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38" + ], + "markers": "platform_python_implementation == 'CPython' and python_version < '3.11'", + "version": "==0.2.7" + }, + "s3transfer": { + "hashes": [ + "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd", + "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947" + ], + "markers": "python_version >= '3.7'", + "version": "==0.6.0" + }, + "semver": { + "hashes": [ + "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4", + "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f" + ], + "index": "pypi", + "version": "==2.13.0" + }, + "sh": { + "hashes": [ + "sha256:e4045b6c732d9ce75d571c79f5ac2234edd9ae4f5fa9d59b09705082bdca18c7" + ], + "markers": "sys_platform != 'win32'", + "version": "==1.14.3" + }, + "six": { + "hashes": [ + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==1.16.0" + }, + "smmap": { + "hashes": [ + 
"sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94", + "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936" + ], + "markers": "python_version >= '3.6'", + "version": "==5.0.0" + }, + "sniffio": { + "hashes": [ + "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101", + "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384" + ], + "markers": "python_version >= '3.7'", + "version": "==1.3.0" + }, + "stevedore": { + "hashes": [ + "sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021", + "sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771" + ], + "markers": "python_version >= '3.8'", + "version": "==5.0.0" + }, + "toml": { + "hashes": [ + "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", + "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" + ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", + "version": "==0.10.2" + }, + "tomli": { + "hashes": [ + "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f", + "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c" + ], + "markers": "python_version >= '3.6'", + "version": "==1.2.3" + }, + "tomlkit": { + "hashes": [ + "sha256:5325463a7da2ef0c6bbfefb62a3dc883aebe679984709aee32a317907d0a8d3c", + "sha256:f392ef70ad87a672f02519f99967d28a4d3047133e2d1df936511465fbb3791d" + ], + "markers": "python_version >= '3.7'", + "version": "==0.11.7" + }, + "tqdm": { + "hashes": [ + "sha256:1871fb68a86b8fb3b59ca4cdd3dcccbc7e6d613eeed31f4c332531977b89beb5", + "sha256:c4f53a17fe37e132815abceec022631be8ffe1b9381c2e6e30aa70edc99e9671" + ], + "markers": "python_version >= '3.7'", + "version": "==4.65.0" + }, + "twine": { + "hashes": [ + "sha256:8efa52658e0ae770686a13b675569328f1fba9837e5de1867bfe5f46a9aefe19", + "sha256:d0550fca9dc19f3d5e8eadfce0c227294df0a2a951251a4385797c8a6198b7c8" + ], + "index": "pypi", + "version": "==3.8.0" + }, + "typeguard": { + "hashes": [ + "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4", + "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1" + ], + "markers": "python_full_version >= '3.5.3'", + "version": "==2.13.3" + }, + "types-aiofiles": { + "hashes": [ + "sha256:1f93aa68e47de1379f45eef9acd34faa0f9341628921cd6aede666e6e559a5a8", + "sha256:be6715fffd1c7f84c9316000ba8bbc66a884246dbd2902c163ebc2d67315206b" + ], + "index": "pypi", + "version": "==0.8.11" + }, + "types-colorama": { + "hashes": [ + "sha256:a9421eb24d9cfc584880dc1d33b7fd406a14227c1f99f50c5ab9265e04d07638", + "sha256:d1e37571a19e152c930b3e789c316e9332e51a43bfcd4470b98225be974fb90c" + ], + "version": "==0.4.15.11" + }, + "types-pyyaml": { + "hashes": [ + "sha256:5aed5aa66bd2d2e158f75dda22b059570ede988559f030cf294871d3b647e3e8", + "sha256:c51b1bd6d99ddf0aa2884a7a328810ebf70a4262c292195d3f4f9a0005f9eeb6" + ], + "version": "==6.0.12.9" + }, + "types-setuptools": { + "hashes": [ + "sha256:8ee03d823fe7fda0bd35faeae33d35cb5c25b497263e6a58b34c4cfd05f40bcf", + "sha256:9660b8774b12cd61b448e2fd87a667c02e7ec13ce9f15171f1d49a4654c4df6a" + ], + "version": "==57.4.18" + }, + "typing-extensions": { + "hashes": [ + "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb", + "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4" + ], + "markers": "python_version >= '3.7'", + "version": "==4.5.0" + }, + "untokenize": { + "hashes": [ + 
"sha256:3865dbbbb8efb4bb5eaa72f1be7f3e0be00ea8b7f125c69cbd1f5fda926f37a2" + ], + "version": "==0.1.1" + }, + "urllib3": { + "hashes": [ + "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305", + "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==1.26.15" + }, + "watchdog": { + "hashes": [ + "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a", + "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100", + "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8", + "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc", + "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae", + "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41", + "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0", + "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f", + "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c", + "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9", + "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3", + "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709", + "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83", + "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759", + "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9", + "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3", + "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7", + "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f", + "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346", + "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674", + "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397", + "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96", + "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d", + "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a", + "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64", + "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44", + "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33" + ], + "markers": "python_version >= '3.7'", + "version": "==3.0.0" + }, + "webencodings": { + "hashes": [ + "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", + "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923" + ], + "version": "==0.5.1" + }, + "werkzeug": { + "hashes": [ + "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe", + "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612" + ], + "markers": "python_version >= '3.7'", + "version": "==2.2.3" + }, + "wheel": { + "hashes": [ + "sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873", + "sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247" + ], + "markers": "python_version >= '3.7'", + "version": "==0.40.0" + }, + "wrapt": { + "hashes": [ + "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0", + "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420", + 
"sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a", + "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c", + "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079", + "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923", + "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f", + "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1", + "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8", + "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86", + "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0", + "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364", + "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e", + "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c", + "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e", + "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c", + "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727", + "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff", + "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e", + "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29", + "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7", + "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72", + "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475", + "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a", + "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317", + "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2", + "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd", + "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640", + "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98", + "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248", + "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e", + "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d", + "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec", + "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1", + "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e", + "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9", + "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92", + "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb", + "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094", + "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46", + "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29", + "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd", + "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705", + "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8", + "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975", + "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb", + "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e", + "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b", + 
"sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418", + "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019", + "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1", + "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba", + "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6", + "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2", + "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3", + "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7", + "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752", + "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416", + "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f", + "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1", + "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc", + "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145", + "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee", + "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a", + "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7", + "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b", + "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653", + "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0", + "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90", + "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29", + "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6", + "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034", + "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09", + "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559", + "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639" + ], + "markers": "python_version < '3.11'", + "version": "==1.15.0" + }, + "xmltodict": { + "hashes": [ + "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56", + "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852" + ], + "markers": "python_version >= '3.4'", + "version": "==0.13.0" + }, + "zipp": { + "hashes": [ + "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b", + "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556" + ], + "markers": "python_version < '3.10'", + "version": "==3.15.0" + } + } +} diff --git a/README.md b/README.md new file mode 100644 index 00000000..e69de29b diff --git a/cdk/.gitignore b/cdk/.gitignore new file mode 100644 index 00000000..37833f8b --- /dev/null +++ b/cdk/.gitignore @@ -0,0 +1,10 @@ +*.swp +package-lock.json +__pycache__ +.pytest_cache +.venv +*.egg-info + +# CDK asset staging directory +.cdk.staging +cdk.out diff --git a/cdk/README.md b/cdk/README.md new file mode 100644 index 00000000..c53f0b50 --- /dev/null +++ b/cdk/README.md @@ -0,0 +1,58 @@ + +# Welcome to your CDK Python project! + +This is a blank project for CDK development with Python. + +The `cdk.json` file tells the CDK Toolkit how to execute your app. + +This project is set up like a standard Python project. The initialization +process also creates a virtualenv within this project, stored under the `.venv` +directory. 
To create the virtualenv it assumes that there is a `python3` +(or `python` for Windows) executable in your path with access to the `venv` +package. If for any reason the automatic creation of the virtualenv fails, +you can create the virtualenv manually. + +To manually create a virtualenv on MacOS and Linux: + +``` +$ python3 -m venv .venv +``` + +After the init process completes and the virtualenv is created, you can use the following +step to activate your virtualenv. + +``` +$ source .venv/bin/activate +``` + +If you are a Windows platform, you would activate the virtualenv like this: + +``` +% .venv\Scripts\activate.bat +``` + +Once the virtualenv is activated, you can install the required dependencies. + +``` +$ pip install -r requirements.txt +``` + +At this point you can now synthesize the CloudFormation template for this code. + +``` +$ cdk synth +``` + +To add additional dependencies, for example other CDK libraries, just add +them to your `setup.py` file and rerun the `pip install -r requirements.txt` +command. + +## Useful commands + + * `cdk ls` list all stacks in the app + * `cdk synth` emits the synthesized CloudFormation template + * `cdk deploy` deploy this stack to your default AWS account/region + * `cdk diff` compare deployed stack with current state + * `cdk docs` open CDK documentation + +Enjoy! diff --git a/cdk/app.py b/cdk/app.py new file mode 100644 index 00000000..32f971fa --- /dev/null +++ b/cdk/app.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python3 +import os + +import aws_cdk as cdk + +from cdk.cdk_stack import CdkStack + + +app = cdk.App() +CdkStack(app, "CdkStack", + # If you don't specify 'env', this stack will be environment-agnostic. + # Account/Region-dependent features and context lookups will not work, + # but a single synthesized template can be deployed anywhere. + + # Uncomment the next line to specialize this stack for the AWS Account + # and Region that are implied by the current CLI configuration. + + #env=cdk.Environment(account=os.getenv('CDK_DEFAULT_ACCOUNT'), region=os.getenv('CDK_DEFAULT_REGION')), + + # Uncomment the next line if you know exactly what Account and Region you + # want to deploy the stack to. 
*/ + + #env=cdk.Environment(account='123456789012', region='us-east-1'), + + # For more information, see https://docs.aws.amazon.com/cdk/latest/guide/environments.html + ) + +app.synth() diff --git a/cdk/cdk.json b/cdk/cdk.json new file mode 100644 index 00000000..d2701013 --- /dev/null +++ b/cdk/cdk.json @@ -0,0 +1,49 @@ +{ + "app": "python3 app.py", + "watch": { + "include": [ + "**" + ], + "exclude": [ + "README.md", + "cdk*.json", + "requirements*.txt", + "source.bat", + "**/__init__.py", + "python/__pycache__", + "tests" + ] + }, + "context": { + "@aws-cdk/aws-lambda:recognizeLayerVersion": true, + "@aws-cdk/core:checkSecretUsage": true, + "@aws-cdk/core:target-partitions": [ + "aws", + "aws-cn" + ], + "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, + "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, + "@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true, + "@aws-cdk/aws-iam:minimizePolicies": true, + "@aws-cdk/core:validateSnapshotRemovalPolicy": true, + "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true, + "@aws-cdk/aws-s3:createDefaultLoggingPolicy": true, + "@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true, + "@aws-cdk/aws-apigateway:disableCloudWatchRole": true, + "@aws-cdk/core:enablePartitionLiterals": true, + "@aws-cdk/aws-events:eventsTargetQueueSameAccount": true, + "@aws-cdk/aws-iam:standardizedServicePrincipals": true, + "@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true, + "@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true, + "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true, + "@aws-cdk/aws-route53-patters:useCertificate": true, + "@aws-cdk/customresources:installLatestAwsSdkDefault": false, + "@aws-cdk/aws-rds:databaseProxyUniqueResourceName": true, + "@aws-cdk/aws-codedeploy:removeAlarmsFromDeploymentGroup": true, + "@aws-cdk/aws-apigateway:authorizerChangeDeploymentLogicalId": true, + "@aws-cdk/aws-ec2:launchTemplateDefaultUserData": true, + "@aws-cdk/aws-secretsmanager:useAttachedSecretResourcePolicyForSecretTargetAttachments": true, + "@aws-cdk/aws-redshift:columnId": true, + "@aws-cdk/aws-stepfunctions-tasks:enableEmrServicePolicyV2": true + } +} diff --git a/cdk/cdk/__init__.py b/cdk/cdk/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/cdk/cdk/cdk_stack.py b/cdk/cdk/cdk_stack.py new file mode 100644 index 00000000..f5a5d092 --- /dev/null +++ b/cdk/cdk/cdk_stack.py @@ -0,0 +1,37 @@ +from aws_cdk import ( + # Duration, + Stack, + aws_config as config + # aws_sqs as sqs, +) +from constructs import Construct +from pathlib import Path + +class CdkStack(Stack): + + def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: + super().__init__(scope, construct_id, **kwargs) + + # The code that defines your stack goes here + + # example resource + # queue = sqs.Queue( + # self, "RdkCdkQueue", + # visibility_timeout=Duration.seconds(300), + # ) + rule_name = "MyRuleCFNGuard" + rule_dir = "rdk_rules" + sample_policy_text = Path(f'{rule_dir}/{rule_name}/rule_code.guard').read_text() + + # sample_policy_text = """ + # rule checkcompliance when + # resourceType IN ['AWS::SNS::Topic'] { + # awsRegion == "us-east-1" + # } + # """ + + config.CustomPolicy(self, "CustomSnsPolicy", + policy_text=sample_policy_text, + enable_debug_log=True, + rule_scope=config.RuleScope.from_resources([config.ResourceType.SNS_TOPIC]) + ) \ No newline at end of file diff --git a/cdk/requirements-dev.txt b/cdk/requirements-dev.txt new file mode 100644 
index 00000000..92709451 --- /dev/null +++ b/cdk/requirements-dev.txt @@ -0,0 +1 @@ +pytest==6.2.5 diff --git a/cdk/requirements.txt b/cdk/requirements.txt new file mode 100644 index 00000000..9452f930 --- /dev/null +++ b/cdk/requirements.txt @@ -0,0 +1,2 @@ +aws-cdk-lib==2.72.1 +constructs>=10.0.0,<11.0.0 diff --git a/cdk/source.bat b/cdk/source.bat new file mode 100644 index 00000000..9e1a8344 --- /dev/null +++ b/cdk/source.bat @@ -0,0 +1,13 @@ +@echo off + +rem The sole purpose of this script is to make the command +rem +rem source .venv/bin/activate +rem +rem (which activates a Python virtualenv on Linux or Mac OS X) work on Windows. +rem On Windows, this command just runs this batch file (the argument is ignored). +rem +rem Now we don't need to document a Windows command for activating a virtualenv. + +echo Executing .venv\Scripts\activate.bat for you +.venv\Scripts\activate.bat diff --git a/cdk/tests/__init__.py b/cdk/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/cdk/tests/unit/__init__.py b/cdk/tests/unit/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/cdk/tests/unit/test_cdk_stack.py b/cdk/tests/unit/test_cdk_stack.py new file mode 100644 index 00000000..b266920e --- /dev/null +++ b/cdk/tests/unit/test_cdk_stack.py @@ -0,0 +1,15 @@ +import aws_cdk as core +import aws_cdk.assertions as assertions + +from cdk.cdk_stack import CdkStack + +# example tests. To run these tests, uncomment this file along with the example +# resource in cdk/cdk_stack.py +def test_sqs_queue_created(): + app = core.App() + stack = CdkStack(app, "cdk") + template = assertions.Template.from_stack(stack) + +# template.has_resource_properties("AWS::SQS::Queue", { +# "VisibilityTimeout": 300 +# }) diff --git a/docs/faq.md b/docs/faq.md new file mode 100644 index 00000000..b81fe619 --- /dev/null +++ b/docs/faq.md @@ -0,0 +1,3 @@ +## What is RDK? + +The AWS Config Rules Development Kit helps developers set up, author and test custom Config rules. It contains scripts to enable AWS Config, create a Config rule and test it with sample ConfigurationItems. diff --git a/docs/install.md b/docs/install.md new file mode 100644 index 00000000..eb2a5b21 --- /dev/null +++ b/docs/install.md @@ -0,0 +1,74 @@ +## Prerequisites + +RDK requires `cdk` version 2 (or higher) to be installed and available in the `PATH`. + +RDK is developed in Python and requires Python v3.8 (or higher). 
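+
+A quick sanity check for both prerequisites (a minimal sketch; it assumes `cdk` and `python3` are already on your `PATH`):
+
+```bash
+cdk --version       # expect 2.x or newer
+python3 --version   # expect 3.8 or newer
+```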
+ +## Installing RDK + +RDK is distributed as a Python Package (`rdk`) + +### Using `pip` + +_CLI_: + +```bash +pip install 'rdk>=1,<2' +``` + +_requirements.txt_: + +```text +rdk>=1,<2 +``` + +### Using `pipenv` + +_CLI_: + +```bash +pipenv install 'rdk>=1,<2' +``` + +_Pipfile_: + +```toml +[[source]] +name = "pypi" +verify_ssl = true + +[packages] +rdk = ">=1,<2" +``` + +### Using `poetry` + +_CLI_: + +```bash +poetry add 'rdk>=1,<2' +``` + +_pyproject.toml_: + +```toml +[tool.poetry] +[[tool.poetry.source]] +name = "pypi" +default = true + +[tool.poetry.dependencies] +rdk = ">=1,<2" +``` + +### Using `pipx` + +```bash +pipx install 'rdk>=1,<2' +``` + +### Using `conda` + +```bash +conda install 'rdk>=1,<2' +``` diff --git a/docs/usage/development.md b/docs/usage/development.md new file mode 100644 index 00000000..e69de29b diff --git a/docs/usage/getting-started.md b/docs/usage/getting-started.md new file mode 100644 index 00000000..1dfc80f7 --- /dev/null +++ b/docs/usage/getting-started.md @@ -0,0 +1,3 @@ +## Introduction + +The AWS Config Rules Development Kit helps developers set up, author and test custom Config rules. It contains scripts to enable AWS Config, create a Config rule and test it with sample ConfigurationItems. \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 00000000..8f41dce0 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,62 @@ +################################################################################ +# MKDOCS CONFIGURATIONS +################################################################################ + +# +# Reference: +# https://www.mkdocs.org/user-guide/configuration/ +# + +# ------------------------------------------------------------------------------ +# SITE CONFIGURATIONS +# ------------------------------------------------------------------------------ + +site_name: RDK User Guide +site_url: + +repo_url: https://github.com/awslabs/aws-config-rdk +repo_name: Github + +theme: + name: material + palette: + - media: '(prefers-color-scheme: light)' + scheme: default + primary: blue + accent: red + toggle: + icon: material/toggle-switch-off-outline + name: Switch to dark mode + - media: '(prefers-color-scheme: dark)' + scheme: slate + primary: blue + accent: red + toggle: + icon: material/toggle-switch + name: Switch to light mode + features: + - navigation.sections + - navigation.top + +markdown_extensions: +- toc: + permalink: true + +# ------------------------------------------------------------------------------ +# NAVIGATION +# ------------------------------------------------------------------------------ + +nav: +- Home: index.md +- Install: install.md +- FAQ: faq.md +- Usage: + - Getting Started: usage/getting-started.md + - AWS Configurations: usage/aws-configurations.md + - Developing Test Cases: usage/development.md +- Reference: + - Command Line: reference/cli.md + - Test Case: reference/test-case.md + - API: reference/api/ + +################################################################################ diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 00000000..6710a685 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,25 @@ +############################################################################### +# MYPY CONFIGURATIONS +############################################################################### + +# +# Reference: +# https://mypy.readthedocs.io/en/stable/config_file.html +# + +[mypy] +# Behavior +ignore_missing_imports = True + +# Outputs +pretty = True +show_error_codes = True + +# Reports 
+html_report = .reports/mypy-html +junit_xml = .reports/mypy.xml + +# Excludes +exclude = mugc.py + +############################################################################### diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..e5a40d1c --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,124 @@ +############################################################################### +# PYTHON PROJECT CONFIGS +############################################################################### + +# ----------------------------------------------------------------------------- +# BLACK +# ----------------------------------------------------------------------------- +[tool.black] +target-version = ["py38"] + +# ----------------------------------------------------------------------------- +# ISORT +# ----------------------------------------------------------------------------- +[tool.isort] +profile = "black" +virtual_env = ".venv" + +# ----------------------------------------------------------------------------- +# PYLINT +# ----------------------------------------------------------------------------- +[tool.pylint.MASTER] + +# Load Plugins +# pylint_pytest: Suppress pytest related false-positives +# pylint_json2html: Support jsonextended output-format +load-plugins = "pylint_pytest,pylint_json2html" + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use. +jobs = 0 + +# Pickle collected data for later comparisons. +persistent = "no" + +# Min score threshold +fail-under = "9.0" + +# Files or directories to be skipped. They should be base names, not paths. +ignore = "mugc.py" + +[tool.pylint.BASIC] + +# Naming conventions +# Allowed: lower-cased single words, or snake_case words +attr-rgx = "^(?:[a-z0-9_]+)$" +argument-rgx = "^(?:[a-z0-9_]+)$" +variable-rgx = "^(?:[a-z0-9_]+)$" +inlinevar-rgx = "^(?:[a-z0-9_]+)$" + +# Display hints on above naming conventions +include-naming-hint = "yes" + +[tool.pylint."MESSAGES CONTROL"] + +# Why? 
+
+# * broad-except: We are trying to catch all exceptions
+# * duplicate-code: Caused by similar import statements
+# * line-too-long: black takes care of this
+# * logging-fstring-interpolation: why would we NOT use f-strings ever
+# * missing-module-docstring: we document each function/class/method
+
+disable = """
+broad-except,
+duplicate-code,
+line-too-long,
+logging-fstring-interpolation,
+missing-module-docstring,
+"""
+
+# -----------------------------------------------------------------------------
+# PYTEST
+# -----------------------------------------------------------------------------
+[tool.pytest.ini_options]
+
+# Test discovery naming conventions (test classes are named `PyTest*`)
+# https://docs.pytest.org/en/latest/example/pythoncollection.html#changing-naming-conventions
+python_files = "test_*.py"
+python_classes = "PyTest"
+python_functions = "test_*"
+
+# CLI Options
+addopts = """\
+    --exitfirst \
+    --strict-config \
+    -vv \
+    -p no:warnings \
+    --junit-xml=.reports/junit.xml \
+    --report-log=.reports/pytest-log.json \
+    --cov=rdk \
+    --cov-config=.coveragerc \
+    --no-cov-on-fail \
+    --cov-report=term \
+    --cov-report=html \
+    --cov-report=xml
+    """
+
+# Enable `pytest` CLI logging
+log_cli = true
+log_cli_level = "INFO"
+
+# Set package log level
+log_level = "DEBUG"
+log_date_format = "%Y-%m-%dT%H:%M:%S%z"
+log_format = "%(asctime)s | %(levelname)-8s | %(message)s"
+
+# Junit suite-name
+junit_suite_name = "rdk"
+
+# pytest-console-scripts options
+script_launch_mode = "subprocess"
+
+# pytest-env
+env = [
+    # Fake creds for moto
+    "AWS_ACCESS_KEY_ID=testing",
+    "AWS_SECRET_ACCESS_KEY=testing",
+    "AWS_SESSION_TOKEN=testing",
+    "AWS_SECURITY_TOKEN=testing",
+    "AWS_DEFAULT_REGION=us-east-1",
+    "AWS_REGION=us-east-1",
+    "MOTO_ACCOUNT_ID=123456789012",
+]
+
+###############################################################################
diff --git a/rdk/__init__.py b/rdk/__init__.py
new file mode 100644
index 00000000..c80e7d48
--- /dev/null
+++ b/rdk/__init__.py
@@ -0,0 +1,11 @@
+from rdk.version import __version__
+
+# Package metadata
+NAME = "rdk"
+DIST_NAME = "rdk"
+CLI_NAME = "rdk"
+VERSION = __version__
+DESCRIPTION = "The AWS Config Rules Development Kit helps developers set up, author and test custom Config rules. It contains scripts to enable AWS Config, create a Config rule and test it with sample ConfigurationItems."
+MAINTAINER = "RDK maintainer"
+MAINTAINER_EMAIL = "rdk-maintainers@amazon.com"
+URL = "https://github.com/awslabs/aws-config-rdk"
diff --git a/rdk/__main__.py b/rdk/__main__.py
new file mode 100644
index 00000000..9edec4dc
--- /dev/null
+++ b/rdk/__main__.py
@@ -0,0 +1,8 @@
+"""
+Allow invoking the CLI using `python -m rdk`
+"""
+
+import rdk.cli.main
+
+if __name__ == "__main__":
+    rdk.cli.main.main()
diff --git a/rdk/cli/__init__.py b/rdk/cli/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/rdk/cli/commands/__init__.py b/rdk/cli/commands/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/rdk/cli/commands/deploy.py b/rdk/cli/commands/deploy.py
new file mode 100644
index 00000000..38cb16f4
--- /dev/null
+++ b/rdk/cli/commands/deploy.py
@@ -0,0 +1,17 @@
+import sys
+from typing import Any, Callable, Dict, List, Optional
+
+
+from rdk.utils.logger import get_main_logger
+from rdk.core.rules_deploy import RulesDeploy
+
+
+def run(rulenames: List[str], dryrun: bool):
+    """
+    deploy sub-command handler.
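+
+    A minimal invocation sketch (the rule name is hypothetical); the CLI layer
+    calls this with the parsed arguments:
+
+        run(rulenames=["MyRule"], dryrun=True)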
+    """
+
+    logger = get_main_logger()
+    logger.info("RDK is starting ...")
+
+    sys.exit(RulesDeploy(rulenames=rulenames, dryrun=dryrun).run())
diff --git a/rdk/cli/commands/init.py b/rdk/cli/commands/init.py
new file mode 100644
index 00000000..8016074b
--- /dev/null
+++ b/rdk/cli/commands/init.py
@@ -0,0 +1,15 @@
+import sys
+
+# from rdk.core.init import RdkInitializer
+from rdk.utils.logger import get_main_logger
+
+
+def run():
+    """
+    init sub-command handler.
+    """
+
+    logger = get_main_logger()
+    logger.info("AWS Config initialization is starting ...")
+
+    sys.exit(print("RDK initializer"))
diff --git a/rdk/cli/main.py b/rdk/cli/main.py
new file mode 100644
index 00000000..66910efb
--- /dev/null
+++ b/rdk/cli/main.py
@@ -0,0 +1,115 @@
+import argparse
+import logging
+import os
+from pathlib import Path
+
+import rdk as this_pkg
+import rdk.cli.commands.init as init_cmd
+import rdk.cli.commands.deploy as deploy_cmd
+import rdk.utils.logger as rdk_logger
+
+
+def main():
+    """
+    Main CLI handler.
+    """
+
+    # Main parser
+    main_parser = argparse.ArgumentParser(
+        prog=this_pkg.CLI_NAME,
+        description=this_pkg.DESCRIPTION,
+        allow_abbrev=False,
+    )
+    main_parser.add_argument(
+        "-v",
+        "--version",
+        action="version",
+        version=this_pkg.VERSION,
+        help="show the version and exit",
+    )
+
+    # --quiet and --debug are mutually exclusive
+    log_options = main_parser.add_mutually_exclusive_group()
+    log_options.add_argument(
+        "-q",
+        "--quiet",
+        action="store_true",
+        default=False,
+        help="suppress informational logs",
+    )
+    log_options.add_argument(
+        "-d",
+        "--debug",
+        action="store_true",
+        default=False,
+        help="display debug logs",
+    )
+
+    # Commands parser
+    commands_parser = main_parser.add_subparsers(
+        title="commands",
+        dest="command",
+        required=True,
+        metavar="",
+        help=f"Use {this_pkg.NAME} --help for detailed usage",
+    )
+
+    # init
+    commands_parser.add_parser(
+        "init",
+        help="Sets up AWS Config. This will enable configuration recording in AWS and ensure necessary S3 buckets and IAM Roles are created.",
+    )
+
+    # deploy
+    commands_parser_deploy = commands_parser.add_parser(
+        "deploy",
+        help="deploy AWS Config Rules",
+    )
+
+    commands_parser_deploy.add_argument(
+        "rulename",
+        metavar="",
+        nargs="*",
+        default="",
+        help="Rule name(s) to deploy. Rule(s) will be pushed to AWS."
+    )
+
+    commands_parser_deploy.add_argument(
+        "-n",
+        "--dryrun",
+        action="store_true",
+        default=False,
+        help="Dry run mode",
+    )
+
+    # _pytest -- hidden command used by pytests
+    commands_parser.add_parser(
+        "_pytest",
+    )
+
+    # Parse all args and commands
+    args = main_parser.parse_args()
+
+    # Init logger
+    logger = rdk_logger.init_main_logger()
+
+    # Adjust log levels
+    if args.quiet:
+        rdk_logger.update_stream_handler_level(logger=logger, level=logging.WARNING)
+    if args.debug:
+        rdk_logger.update_stream_handler_level(logger=logger, level=logging.DEBUG)
+
+    # handle: _pytest (do nothing)
+    if args.command == "_pytest":
+        pass
+
+    # handle: init
+    if args.command == "init":
+        init_cmd.run()
+
+    # handle: deploy
+    if args.command == "deploy":
+        deploy_cmd.run(
+            rulenames=args.rulename,
+            dryrun=args.dryrun,
+        )
\ No newline at end of file
diff --git a/rdk/core/__init__.py b/rdk/core/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/rdk/core/errors.py b/rdk/core/errors.py
new file mode 100644
index 00000000..a9bf7dac
--- /dev/null
+++ b/rdk/core/errors.py
@@ -0,0 +1,135 @@
+"""
+Well-known exceptions raised by Rdk.
+"""
+
+
+class RdkError(Exception):
+    """
+    Base class for all Rdk errors.
+    """
+
+
+class RdkAwsAccountInvalidError(RdkError):
+    """
+    Current set of AWS Credentials belongs to an unsupported AWS Account.
+    """
+
+
+class RdkAwsRegionNotSetError(RdkError):
+    """
+    Current AWS Region was not determined.
+    """
+
+
+class RdkAwsS3GetObjectError(RdkError):
+    """
+    Error occurred when fetching from S3.
+    """
+
+
+class RdkAwsS3UploadObjectError(RdkError):
+    """
+    Error occurred when uploading to S3.
+    """
+
+
+class RdkAwsS3DeleteObjectError(RdkError):
+    """
+    Error occurred when deleting an S3 object.
+    """
+
+
+class RdkCommandInvokeError(RdkError):
+    """
+    Error occurred when invoking a command.
+    """
+
+
+class RdkCommandExecutionError(RdkError):
+    """
+    Error occurred when executing a command.
+    """
+
+
+class RdkCommandNotAllowedError(RdkError):
+    """
+    An unsupported command was requested to be executed.
+    """
+
+
+class RdkCustodianPolicyReadError(RdkError):
+    """
+    Error reading a custodian policy.
+    """
+
+
+class RdkCustodianUnsupportedModeError(RdkError):
+    """
+    Custodian policy is using an unsupported mode.
+    """
+
+
+class RdkCustodianLambdaMonitorError(RdkError):
+    """
+    Error when monitoring Custodian-managed Lambda Functions.
+    """
+
+
+class RdkCustodianActionWaiterError(RdkError):
+    """
+    Error when implementing wait for custodian actions.
+    """
+
+
+class RdkCustodianLambdaInvokeError(RdkError):
+    """
+    Error when invoking Custodian-managed Lambda Functions.
+    """
+
+
+class RdkMalformedPlanFile(RdkError):
+    """
+    Malformed Rdk Test Plan File.
+    """
+
+
+class RdkPyTestFixtureInitError(RdkError):
+    """
+    Error initializing RdkPyTestFixture.
+    """
+
+
+class RdkTestExecutionError(RdkError):
+    """
+    Error while executing Rdk test case.
+    """
+
+
+class RdkTerraformMalformedPlanData(RdkError):
+    """
+    Malformed Terraform JSON Plan-Representation.
+    """
+
+
+class RdkTerraformMalformedStateData(RdkError):
+    """
+    Malformed Terraform JSON State-Representation.
+    """
+
+
+class RdkTerraformAvenueDownloadError(RdkError):
+    """
+    Error downloading terraform-avenue provider.
+    """
+
+
+class RdkReportUploadS3Error(RdkError):
+    """
+    Error uploading a test report to S3.
+    """
+
+
+class RdkReportUploadInvalidEnvironmentError(RdkError):
+    """
+    Invalid Report Upload Environment.
+    """
diff --git a/rdk/core/rules_deploy.py b/rdk/core/rules_deploy.py
new file mode 100644
index 00000000..6892d540
--- /dev/null
+++ b/rdk/core/rules_deploy.py
@@ -0,0 +1,53 @@
+import logging
+import time
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import List, Optional, Union
+
+import rdk.utils.logger as rdk_logger
+from rdk.runners.cdk import CdkRunner
+
+def _resolve_path(
+    root: Path,
+    thing: Union[str, Path],
+) -> Path:
+    """
+    Helper to resolve and verify paths.
+    """
+    resolved = (root / thing).resolve().absolute()
+    if not resolved.exists():
+        raise FileNotFoundError(resolved.as_posix())
+    return resolved
+
+@dataclass
+class RulesDeploy:
+    """
+    Defines rules for deployment.
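+
+    A minimal usage sketch (the rule name is hypothetical); `run()` currently
+    synthesizes the CloudFormation template(s) via `rdk.runners.cdk.CdkRunner`:
+
+        RulesDeploy(rulenames=["MyRule"], dryrun=False).run()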
+ + Parameters: + + * **`rulenames`** (_str_): list of rule names to deploy + + """ + + rulenames: List[str] + dryrun: bool + + logger: logging.Logger = field(init=False) + + def __post_init__(self): + self.logger = rdk_logger.get_main_logger() + + def run(self): + """ + Runs Rules Deployment + """ + + rules_dir = Path(self.rulenames[0]) + + cdk_runner = CdkRunner( + root_module=Path("./cdk"), + rules_dir=rules_dir + ) + + cdk_runner.synthesize() \ No newline at end of file diff --git a/rdk/py.typed b/rdk/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/rdk/runners/__init__.py b/rdk/runners/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/rdk/runners/base.py b/rdk/runners/base.py new file mode 100644 index 00000000..2712198c --- /dev/null +++ b/rdk/runners/base.py @@ -0,0 +1,217 @@ +# +# NOTE: +# This class uses subprocess.run(...) which is, in some cases, subject +# to shell-injection attacks. However, in this case, we are +# (1) Using shell=False, to not use a shell (safer) and +# (2) Using a well known command in args +# For the most part, we should be using subprocess as safely as possible. +# The places that bandit warns about will be silenced with '# nosec' +# + +import logging +import re +import selectors +import subprocess # nosec +import sys +import time +from dataclasses import dataclass, field +from typing import Dict, List, Optional, TextIO + +from rdk.core.errors import ( + RdkCommandExecutionError, + RdkCommandInvokeError, + RdkCommandNotAllowedError, +) +# from rdk.utils.logger import get_testcase_logger +from rdk.utils.logger import get_main_logger + + + +@dataclass +class BaseRunner: + """ + Base class for various command runners. + """ + + logger: logging.Logger = field(init=False) + + def __post_init__(self): + # self.logger = get_testcase_logger() + self.logger = get_main_logger() + + + # Linter notes: + # * Yes pylint, we know this method is complicated. + # * bandit does not like subprocess. See note at the top of this file + def run_cmd( # pylint: disable=too-many-arguments,too-many-locals,too-many-statements + self, + cmd: List[str], + cwd: Optional[str] = None, + env: Optional[Dict[str, str]] = None, + allowed_return_codes: Optional[List[int]] = None, + capture_output: bool = False, + discard_output: bool = False, + ) -> str: + """ + Runs a command using `subprocess.popen`. + + Parameters: + + * **`cmd`** (_list of str_): The command to run. + * **`cwd`** (_str_): Optional directory to run the command in. + * **`env`** (_mapping_): Optional mapping of environment variables to set. + * **`allowed_return_codes`** (_list of int_): Optional list of acceptable return codes. + * **`capture_output`** (_bool_): Optionally return stdout. Default is `False` + * **`discard_output`** (_bool_): Optionally send stdout to dev-null. 
Default is `False` + + """ + + if not allowed_return_codes: + allowed_return_codes = [0] + + self._check_if_command_is_allowed(cmd[0]) + self.logger.debug(f"Running Command: {' '.join(cmd)}") + + subprocess_popen_kwargs = { + "args": cmd, + # RDK is always non-interactive + "stdin": subprocess.DEVNULL, + # output streams are logged by default + # These will get changed below based on other flags + "stdout": subprocess.PIPE, + "stderr": subprocess.PIPE, + # We're only dealing with text streams for now + "universal_newlines": True, + } + if cwd: + subprocess_popen_kwargs["cwd"] = cwd + if env: + subprocess_popen_kwargs["env"] = env + + # Command output log handling flags + # By default, we log stderr as INFO, + # but relog it as ERROR upon a failure + loglevel_stdout = logging.INFO + loglevel_stderr = logging.INFO + + # What are we doing with outputs? + if capture_output or discard_output: + loglevel_stdout = logging.DEBUG + loglevel_stderr = logging.ERROR + + # Linter ignores: + # * mypy is not happy about `subprocess_run_kwargs`, it thinks we are + # passing a Dict[str, Obj] + # * pylint does not recognize the `**` unpacking for kwargs + # * bandit warns about `subprocess.run` in general. See note at the top + # of this file. + + # Default returns + return_code = 255 + captured_stdout_lines = [] + captured_stderr_lines = [] + + # Run + try: + with subprocess.Popen(**subprocess_popen_kwargs) as process: # type: ignore[call-overload] # nosec + # Read stdout and stderr streams + selctr = selectors.DefaultSelector() + for _maybe_fileobj in [process.stdout, process.stderr]: + if _maybe_fileobj is not None: + selctr.register( + fileobj=_maybe_fileobj, events=selectors.EVENT_READ + ) + + def _log_streams(is_final: bool = False): + """ + Log stuff based on stdout or stderr. + """ + for _selkey, _ in selctr.select(): + # NOTE: Selector key can be empty + if _selkey: + this_fileobj: TextIO = _selkey.fileobj # type: ignore + for _line in iter(this_fileobj.readline, ""): + # Fixup lines + _line_no_escapes = re.sub( + # Remove all escape sequences + # https://superuser.com/a/380778 + r"\x1b\[[0-9;]*[a-zA-Z]", + "", + _line, + ) + _line_rstripped = _line_no_escapes.rstrip() + _line_stripped = _line_no_escapes.strip() + + # Decide what to do with them ... 
+ if this_fileobj is process.stdout: + # This line is a stdout + if capture_output and _line_stripped: + captured_stdout_lines.append(_line_stripped) + if _line_rstripped: + self.logger.log( + level=loglevel_stdout, + msg=_line_rstripped, + ) + if this_fileobj is process.stderr: + # This line is a stderr + if _line_stripped: + captured_stderr_lines.append(_line_stripped) + if _line_rstripped: + self.logger.log( + level=loglevel_stderr, + msg=_line_rstripped, + ) + + # If this is not the final call, iterate + # over each selector alternatively + if not is_final: + break + + # Process streams while the command is running + while process.poll() is None: + _log_streams() + + # Again, if stuff is leftover in the fd's + time.sleep(0.10) + _log_streams(is_final=True) + + # Get return code + return_code = process.returncode + except Exception as exc: + self.logger.exception(exc) + raise RdkCommandInvokeError("Failed to invoke requested command") from exc + + if return_code not in allowed_return_codes: + # log any errors + for _line in captured_stderr_lines: + self.logger.error(_line) + raise RdkCommandExecutionError( + f"Command execution failed with an unacceptable exit code: {return_code}" + ) + + if capture_output: + return "\n".join(captured_stdout_lines) + + return "OK" + + def get_python_executable(self) -> str: # pylint: disable=no-self-use + """ + Returns the current python executable. + """ + current_python_executable = "python" + if sys.executable: + current_python_executable = sys.executable + return current_python_executable + + def _check_if_command_is_allowed(self, cmd: str): + if cmd not in [ + "cdk", + "ls", + "pwd", + # "custodian", + # "git", + # "pytest", + # "terraform", + self.get_python_executable(), + ]: + raise RdkCommandNotAllowedError(f"Unsupported command provided: {cmd}") diff --git a/rdk/runners/cdk.py b/rdk/runners/cdk.py new file mode 100644 index 00000000..10376811 --- /dev/null +++ b/rdk/runners/cdk.py @@ -0,0 +1,53 @@ +import copy +import json +import os +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Dict, List, Optional + +import rdk as this_pkg +from rdk.runners.base import BaseRunner + + +@dataclass +class CdkRunner(BaseRunner): + """ + Helper class to run cdk commands. + https://docs.aws.amazon.com/cdk/v2/guide/hello_world.html + + Parameters: + + * **`root_module`** (_Path_): Path to the cdk root module + * **`config`** (_Config_): `rdk.core.config.Config` object + + """ + + root_module: Path + rules_dir: Path + + def __post_init__(self): + super().__post_init__() + + + def synthesize(self): + """ + Executes `cdk synth`. 
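+        The command is executed from `root_module`, and exit codes 0 and 2 are
+        both treated as success (`allowed_return_codes`).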
+ + Parameters: + """ + cmd = [ + "cdk", + "synth" + ] + + + self.logger.info("Synthsizing CloudFormation template(s)...") + self.logger.info(self.root_module.as_posix()) + self.logger.info(self.rules_dir) + + + self.run_cmd( + cmd=cmd, + cwd=self.root_module.as_posix(), + allowed_return_codes=[0, 2], + ) diff --git a/rdk/utils/__init__.py b/rdk/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/rdk/utils/logger.py b/rdk/utils/logger.py new file mode 100644 index 00000000..141448b2 --- /dev/null +++ b/rdk/utils/logger.py @@ -0,0 +1,151 @@ +import logging +import logging.handlers +import os +import sys +from pathlib import Path +from typing import Optional + +import colorlog + +from rdk import NAME as PKG_NAME + +LOGFILE_NAME = "rdk.log" +LOG_DATE_FMT = "%Y-%m-%dT%H:%M:%S%z" +LOG_COLORS = { + "DEBUG": "cyan", + "INFO": "green", + "WARNING": "yellow", + "ERROR": "red", + "CRITICAL": "bold_red", +} + + +def _fixup_friendly_name(thing: str) -> str: + if len(thing) < 8: + return thing.ljust(8) + if len(thing) > 8: + return thing[:6] + ".." + return thing + + +def _get_log_msg_format(friendly_name: Optional[str] = None) -> str: + components = [ + "%(asctime)s", + "%(levelname)-8s", + "%(message)s", + ] + if friendly_name: + components.insert(2, _fixup_friendly_name(friendly_name)) + return " | ".join(components) + + +def _get_colorlog_msg_format(friendly_name: Optional[str] = None) -> str: + components = [ + "%(thin)s%(asctime)s%(reset)s", + "%(log_color)s%(levelname)-8s%(reset)s", + "%(message)s", + ] + if friendly_name: + components.insert( + 2, + "%(thin_purple)s" + _fixup_friendly_name(friendly_name) + "%(reset)s", + ) + return " | ".join(components) + + +def _do_colorlogs() -> bool: + # Check TTY + isa_tty = False + try: + if sys.stderr.isatty(): + isa_tty = True + except Exception: + isa_tty = False + if not isa_tty: + return False + + # Check if NO_COLOR is requested (https://no-color.org/) + no_color = os.getenv("NO_COLOR", default="").lower() + if no_color and no_color in ["yes", "y", "true", "on", "1"]: + return False + + # Enable colors + return True + + +def _get_stream_handler(friendly_name: Optional[str] = None) -> logging.StreamHandler: + # Build formatters + log_formatter = logging.Formatter( + fmt=_get_log_msg_format(friendly_name=friendly_name), + datefmt=LOG_DATE_FMT, + ) + + # Colors? + if _do_colorlogs(): + log_formatter = colorlog.ColoredFormatter( + fmt=_get_colorlog_msg_format(friendly_name=friendly_name), + datefmt=LOG_DATE_FMT, + reset=True, + log_colors=LOG_COLORS, + ) + + # Build stream handler + stderr_handler = logging.StreamHandler() + stderr_handler.setFormatter(log_formatter) + stderr_handler.setLevel(logging.INFO) + + return stderr_handler + + +def init_main_logger() -> logging.Logger: + """ + Initialize main logger. + """ + friendly_name = "main" + logger = logging.getLogger(f"{PKG_NAME}.cli.{friendly_name}") + logger.setLevel(logging.DEBUG) + logger.addHandler(logging.NullHandler()) + logger.addHandler(_get_stream_handler(friendly_name=friendly_name)) + return logger + + +def add_file_handler(logger: logging.Logger, logfile_dir: Path): + """ + Add a file handler to an existing logger once the location is known. 
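+
+    A minimal usage sketch (the log directory here is hypothetical):
+
+        add_file_handler(logger=get_main_logger(), logfile_dir=Path(".rdk"))
+
+    The handler rotates at 10 MB, keeps 10 backups, and always logs at DEBUG level.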
+ """ + + friendly_name = logger.name.split(".")[-1] + logfile_dir.mkdir(parents=True, exist_ok=True) + + logfile_formatter = logging.Formatter( + fmt=_get_log_msg_format(friendly_name=friendly_name), + datefmt=LOG_DATE_FMT, + ) + + file_handler = logging.handlers.RotatingFileHandler( + filename=(logfile_dir / LOGFILE_NAME), + mode="a", + encoding="utf-8", + maxBytes=10485760, # 10mb + backupCount=10, + ) + file_handler.setFormatter(logfile_formatter) + file_handler.setLevel(logging.DEBUG) + logger.addHandler(file_handler) + + +def update_stream_handler_level(logger: logging.Logger, level: int): + """ + Dynamically update log levels for a stream handler. + """ + for handler in logger.handlers: + if isinstance(handler, logging.StreamHandler): + handler.setLevel(level) + + +def get_main_logger() -> logging.Logger: + """ + Return main logger. + """ + friendly_name = "main" + return logging.getLogger(f"{PKG_NAME}.cli.{friendly_name}") diff --git a/rdk/version.py b/rdk/version.py new file mode 100644 index 00000000..5becc17c --- /dev/null +++ b/rdk/version.py @@ -0,0 +1 @@ +__version__ = "1.0.0" diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..d131e82c --- /dev/null +++ b/requirements.txt @@ -0,0 +1,35 @@ +-i https://pypi.python.org/simple +aiofiles==0.8.0 +anyio==3.6.2 ; python_full_version >= '3.6.2' +attrs==22.2.0 ; python_version >= '3.6' +aws-cdk-lib==2.72.1 +aws-cdk.asset-awscli-v1==2.2.129 ; python_version ~= '3.7' +aws-cdk.asset-kubectl-v20==2.1.1 ; python_version ~= '3.7' +aws-cdk.asset-node-proxy-agent-v5==2.0.105 ; python_version ~= '3.7' +cattrs==22.2.0 ; python_version >= '3.7' +certifi==2022.12.7 ; python_version >= '3.6' +colorlog==4.8.0 +constructs==10.1.302 +exceptiongroup==1.1.1 ; python_version < '3.11' +h11==0.14.0 ; python_version >= '3.7' +httpcore==0.16.3 ; python_version >= '3.7' +httpx==0.23.3 +idna==3.4 +importlib-resources==5.12.0 ; python_version >= '3.7' +iniconfig==2.0.0 ; python_version >= '3.7' +jsii==1.80.0 ; python_version ~= '3.7' +mergedeep==1.3.4 +packaging==23.0 ; python_version >= '3.7' +pluggy==1.0.0 ; python_version >= '3.6' +publication==0.0.3 +py==1.11.0 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' +pytest==6.2.5 +python-dateutil==2.8.2 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2' +rfc3986[idna2008]==1.5.0 +semver==2.13.0 +six==1.16.0 ; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2' +sniffio==1.3.0 ; python_version >= '3.7' +toml==0.10.2 ; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2' +typeguard==2.13.3 ; python_full_version >= '3.5.3' +typing-extensions==4.5.0 ; python_version >= '3.7' +zipp==3.15.0 ; python_version < '3.10' diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..062b8b23 --- /dev/null +++ b/setup.py @@ -0,0 +1,43 @@ +from setuptools import find_packages, setup + +import rdk as this_pkg + +setup( + name=this_pkg.DIST_NAME, + version=this_pkg.VERSION, + description=this_pkg.DESCRIPTION, + long_description=this_pkg.DESCRIPTION, + long_description_content_type="text/plain", + url=this_pkg.URL, + license="Apache-2.0", + author=this_pkg.MAINTAINER, + author_email=this_pkg.MAINTAINER_EMAIL, + maintainer=this_pkg.MAINTAINER, + maintainer_email=this_pkg.MAINTAINER_EMAIL, + python_requires=">=3.8", + zip_safe=False, + packages=find_packages(include=[f"{this_pkg.NAME}", f"{this_pkg.NAME}.*"]), + package_data={ + this_pkg.NAME: ["py.typed"], + }, + install_requires=[ + "aiofiles<1", + # "aws-cdk<2", + 
"aws-cdk-lib>=2", + "constructs>=10,<11", + # "boto3>=1,<2", + # "c1-p13rlib>=2,<3", + # "c7n", + "colorlog>=4,<5", + "httpx<1", + "mergedeep>=1,<2", + "pytest>=6,<7", + "semver>=2,<3", + ], + entry_points={ + "console_scripts": [ + f"{this_pkg.CLI_NAME}={this_pkg.NAME}.cli.main:main", + ], + "pytest11": [f"pytest_{this_pkg.NAME}={this_pkg.NAME}.pytest.fixture"], + }, +) diff --git a/sonar-project.properties b/sonar-project.properties new file mode 100644 index 00000000..4ce7d7c9 --- /dev/null +++ b/sonar-project.properties @@ -0,0 +1,38 @@ +################################################################################ +# SONAR ANALYSIS PARAMETERS +################################################################################ + +# +# Reference: +# https://docs.sonarqube.org/7.9/analysis/analysis-parameters/ +# https://docs.sonarqube.org/7.9/analysis/coverage/ +# https://docs.sonarqube.org/7.9/analysis/external-issues/ +# https://docs.sonarqube.org/7.9/analysis/languages/python/ +# https://docs.sonarqube.org/7.9/analysis/scan/sonarscanner/ +# https://docs.sonarqube.org/7.9/branches/overview/ +# + +# Server Configs +sonar.host.url=https://sonar.cloud.example.com/ + +# Project Configs +sonar.projectKey=rdk +sonar.projectName=rdk +sonar.projectVersion=latest +sonar.projectDescription=CLI to Test Cloud Custodian Policies +sonar.links.homepage=https://github.com/awslabs/aws-config-rdk + +# Repo Configs +sonar.sourceEncoding=UTF-8 +sonar.scm.provider=git + +# Paths +sonar.tests=tests/unit + +# Reports +sonar.python.bandit.reportPaths=.reports/bandit.json +sonar.python.coverage.reportPaths=.reports/coverage.xml +sonar.python.pylint.reportPath=.reports/pylint-sonar.txt +sonar.python.xunit.reportPath=.reports/junit.xml + +################################################################################ diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/cli/__init__.py b/tests/unit/cli/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/cli/command/__init__.py b/tests/unit/cli/command/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/cli/command/test_init.py b/tests/unit/cli/command/test_init.py new file mode 100644 index 00000000..ee344cc2 --- /dev/null +++ b/tests/unit/cli/command/test_init.py @@ -0,0 +1,14 @@ +import pytest +from pytest_mock import MockerFixture + +from rdk.cli.commands import init as init_cmd + + +def test_run_exit(mocker: MockerFixture): + mock1 = mocker.patch("rdk.core.init.RdkInitializer.run") + + with pytest.raises(SystemExit) as exc_info: + init_cmd.run() + assert exc_info.type is SystemExit + + mock1.assert_called_once() diff --git a/tests/unit/cli/conftest.py b/tests/unit/cli/conftest.py new file mode 100644 index 00000000..01311cb6 --- /dev/null +++ b/tests/unit/cli/conftest.py @@ -0,0 +1,8 @@ +import pytest + +from rdk import CLI_NAME + + +@pytest.fixture +def cli_name() -> str: + return CLI_NAME diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py new file mode 100644 index 00000000..5423a1db --- /dev/null +++ b/tests/unit/conftest.py @@ -0,0 +1,15 @@ +import logging +from pathlib import Path + +import pytest +from moto import mock_sts + +import rdk.utils.logger as rdk_logger + +# Silence boto3 logs in tests +for name in ["boto", "urllib3", "s3transfer", "boto3", "botocore", "nose"]: + 
logging.getLogger(name).setLevel(logging.CRITICAL) + +# Enable debug logs for rdk +logger = rdk_logger.get_main_logger() +rdk_logger.update_stream_handler_level(logger=logger, level=logging.DEBUG) \ No newline at end of file diff --git a/tests/unit/core/__init__.py b/tests/unit/core/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/core/conftest.py b/tests/unit/core/conftest.py new file mode 100644 index 00000000..07efbe49 --- /dev/null +++ b/tests/unit/core/conftest.py @@ -0,0 +1,8 @@ +from pathlib import Path + +import pytest + + +@pytest.fixture +def data_path() -> Path: + return (Path(__file__).parent / "data").resolve().absolute() diff --git a/tests/unit/core/data/my-pytest.py b/tests/unit/core/data/my-pytest.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/core/data/plan-bad-01.yaml b/tests/unit/core/data/plan-bad-01.yaml new file mode 100644 index 00000000..0a4d092b --- /dev/null +++ b/tests/unit/core/data/plan-bad-01.yaml @@ -0,0 +1,23 @@ +tests: + - name: my-test-name + policy: + file: policy.yaml + name: my-policy-name-existing + resource_state: existing + terraform: + root: tf-root-module + pytest: + file_or_dir: + - my-pytest.py + + # duplicate name + - name: my-test-name + policy: + file: policy.yaml + name: my-policy-name-existing + resource_state: existing + terraform: + root: tf-root-module + pytest: + file_or_dir: + - my-pytest.py diff --git a/tests/unit/core/data/plan-bad-02.yaml b/tests/unit/core/data/plan-bad-02.yaml new file mode 100644 index 00000000..6644ecaf --- /dev/null +++ b/tests/unit/core/data/plan-bad-02.yaml @@ -0,0 +1,11 @@ +tests: + - name: my-test-name + policy: + file: policy-does-not-exist.yaml + name: my-policy-name-existing + resource_state: existing + terraform: + root: tf-root-module + pytest: + file_or_dir: + - my-pytest.py diff --git a/tests/unit/core/data/plan-bad-03.yaml b/tests/unit/core/data/plan-bad-03.yaml new file mode 100644 index 00000000..32f9f07b --- /dev/null +++ b/tests/unit/core/data/plan-bad-03.yaml @@ -0,0 +1,7 @@ +tests: + - name: my-test-name + terraform: + root: tf-root-module + pytest: + file_or_dir: + - my-pytest.py diff --git a/tests/unit/core/data/plan-bad-04.yaml b/tests/unit/core/data/plan-bad-04.yaml new file mode 100644 index 00000000..a70c6b60 --- /dev/null +++ b/tests/unit/core/data/plan-bad-04.yaml @@ -0,0 +1,11 @@ +tests: + - name: my-test-name + policy: + file: policy.yaml + name: my-policy-name-existing + resource_state: huh + terraform: + root: tf-root-module + pytest: + file_or_dir: + - my-pytest.py diff --git a/tests/unit/core/data/plan-bad-05.yaml b/tests/unit/core/data/plan-bad-05.yaml new file mode 100644 index 00000000..e87a6fcf --- /dev/null +++ b/tests/unit/core/data/plan-bad-05.yaml @@ -0,0 +1,11 @@ +tests: + - name: my-test-name + policy: + file: policy.yaml + name: my-policy-name-existing + resource_state: existing + terraform: + root: tf-root-module-does-not-exist + pytest: + file_or_dir: + - my-pytest.py diff --git a/tests/unit/core/data/plan-bad-06.yaml b/tests/unit/core/data/plan-bad-06.yaml new file mode 100644 index 00000000..7de21739 --- /dev/null +++ b/tests/unit/core/data/plan-bad-06.yaml @@ -0,0 +1,13 @@ +tests: + - name: my-test-name + policy: + file: policy.yaml + name: my-policy-name-existing + resource_state: existing + terraform: + root: tf-root-module + tfvars: + - does-not-exist.tfvar + pytest: + file_or_dir: + - my-pytest.py diff --git a/tests/unit/core/data/plan-bad-07.yaml b/tests/unit/core/data/plan-bad-07.yaml new file mode 
100644 index 00000000..9d1aa414 --- /dev/null +++ b/tests/unit/core/data/plan-bad-07.yaml @@ -0,0 +1,13 @@ +tests: + - name: my-test-name + policy: + file: policy.yaml + name: my-policy-name-existing + resource_state: existing + terraform: + root: tf-root-module + tfvars_net_change: + - does-not-exist.tfvar + pytest: + file_or_dir: + - my-pytest.py diff --git a/tests/unit/core/data/plan-bad-08.yaml b/tests/unit/core/data/plan-bad-08.yaml new file mode 100644 index 00000000..b8103687 --- /dev/null +++ b/tests/unit/core/data/plan-bad-08.yaml @@ -0,0 +1,11 @@ +tests: + - name: my-test-name + policy: + file: policy.yaml + name: my-policy-name-existing + resource_state: existing + terraform: + root: tf-root-module + pytest: + file_or_dir: + - my-pytest-does-not-exist.py diff --git a/tests/unit/core/data/plan-bad-09.yaml b/tests/unit/core/data/plan-bad-09.yaml new file mode 100644 index 00000000..46d7717a --- /dev/null +++ b/tests/unit/core/data/plan-bad-09.yaml @@ -0,0 +1,9 @@ +tests: + - name: my-test-name + policy: + name: my-policy-name-existing + terraform: + root: tf-root-module + pytest: + file_or_dir: + - my-pytest-does-not-exist.py diff --git a/tests/unit/core/data/plan-bad-10.yaml b/tests/unit/core/data/plan-bad-10.yaml new file mode 100644 index 00000000..f99e6259 --- /dev/null +++ b/tests/unit/core/data/plan-bad-10.yaml @@ -0,0 +1,11 @@ +tests: + - name: my-test-name + policy: + file: policy.yaml + name: my-policy-name-existing + resource_state: existing + terraform: + huh: tf-root-module + pytest: + file_or_dir: + - my-pytest.py diff --git a/tests/unit/core/data/plan-bad-11.yaml b/tests/unit/core/data/plan-bad-11.yaml new file mode 100644 index 00000000..530ead45 --- /dev/null +++ b/tests/unit/core/data/plan-bad-11.yaml @@ -0,0 +1,11 @@ +tests: + - name: my-test-name + policy: + file: policy.yaml + name: my-policy-name-existing + resource_state: existing + terraform: + root: tf-root-module + pytest: + huh: + - my-pytest.py diff --git a/tests/unit/core/data/plan-good-01.yaml b/tests/unit/core/data/plan-good-01.yaml new file mode 100644 index 00000000..bd148f52 --- /dev/null +++ b/tests/unit/core/data/plan-good-01.yaml @@ -0,0 +1,46 @@ +tests: + - name: my-test-name + policy: + file: policy.yaml + name: my-policy-name-existing + resource_state: existing + runtime_files: + c7n_resources: + - runtime-files/file1.json + - runtime-files/file2.json + terraform: + root: tf-root-module + pytest: + options: + - --cache-clear + file_or_dir: + - my-pytest.py + + - name: my-test-name-2 + policy: + file: policy.yaml + name: my-policy-name-net-new + resource_state: net-new + terraform: + root: tf-root-module + tfvars: + - tf-root-module/my-var-1.tfvars + - tf-root-module/my-var-2.tfvars + pytest: + file_or_dir: + - my-pytest.py::test_method + + - name: my-test-name-3 + policy: + file: policy.yaml + name: my-policy-name-net-change + resource_state: net-change + terraform: + root: tf-root-module + tfvars: + - tf-root-module/my-var-1.tfvars + tfvars_net_change: + - tf-root-module/my-var-1.tfvars + pytest: + file_or_dir: + - my-pytest.py diff --git a/tests/unit/core/data/plan-good-02.yaml b/tests/unit/core/data/plan-good-02.yaml new file mode 100644 index 00000000..d6ec5cc7 --- /dev/null +++ b/tests/unit/core/data/plan-good-02.yaml @@ -0,0 +1,13 @@ +tests: + - name: my-test-name + policy: + file: policy.yaml + name: my-policy-name-existing + resource_state: existing + terraform: + root: tf-root-module + pytest: + options: + - --cache-clear + file_or_dir: + - my-pytest.py \ No newline at end of file 
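Taken together, the plan-good and plan-bad fixtures above pin down the test-plan schema: unique test names, a policy block whose file exists on disk, a resource_state of existing/net-new/net-change, and a Terraform root (plus any tfvars) that resolves relative to the plan. A minimal sketch of the checks those fixtures imply — hypothetical names (validate_plan, VALID_RESOURCE_STATES), assuming PyYAML is available; the project's real loader may differ:

    # Sketch only: schema checks inferred from the plan-bad fixtures above.
    from pathlib import Path
    from typing import Any, Dict

    import yaml

    VALID_RESOURCE_STATES = {"existing", "net-new", "net-change"}


    def validate_plan(plan_file: Path) -> Dict[str, Any]:
        plan = yaml.safe_load(plan_file.read_text())
        root = plan_file.parent
        seen = set()
        for test in plan.get("tests", []):
            name = test["name"]
            if name in seen:  # e.g. plan-bad-01: duplicate test name
                raise ValueError(f"duplicate test name: {name}")
            seen.add(name)
            policy = test.get("policy", {})
            if "file" not in policy:  # e.g. plan-bad-03: policy block missing
                raise ValueError(f"{name}: policy.file is required")
            if not (root / policy["file"]).exists():  # e.g. plan-bad-02
                raise ValueError(f"{name}: policy file not found: {policy['file']}")
            if policy.get("resource_state") not in VALID_RESOURCE_STATES:  # e.g. plan-bad-04
                raise ValueError(f"{name}: invalid resource_state")
            tf_root = test.get("terraform", {}).get("root")
            if not tf_root or not (root / tf_root).is_dir():  # e.g. plan-bad-05, plan-bad-10
                raise ValueError(f"{name}: terraform.root is missing or not a directory")
        return plan

A call such as validate_plan(Path("tests/unit/core/data/plan-good-01.yaml")) would pass, while each plan-bad file trips one of these (or a similar) check.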
diff --git a/tests/unit/core/data/policy.yaml b/tests/unit/core/data/policy.yaml new file mode 100644 index 00000000..ff9ee9ed --- /dev/null +++ b/tests/unit/core/data/policy.yaml @@ -0,0 +1,20 @@ +policies: + +- name: my-policy-name-existing + resource: ec2 + +- name: my-policy-name-net-new + resource: ec2 + mode: + type: cloudtrail + events: + - RunInstances + +- name: my-policy-name-net-change + resource: sns + mode: + type: cloudtrail + events: + - source: sns.amazonaws.com + event: SetTopicAttributes + ids: "requestParameters.topicArn" diff --git a/tests/unit/core/data/runtime-files/file1.json b/tests/unit/core/data/runtime-files/file1.json new file mode 100644 index 00000000..8a796876 --- /dev/null +++ b/tests/unit/core/data/runtime-files/file1.json @@ -0,0 +1,3 @@ +{ + "foo": "bar" +} \ No newline at end of file diff --git a/tests/unit/core/data/runtime-files/file2.json b/tests/unit/core/data/runtime-files/file2.json new file mode 100644 index 00000000..ee3dfe22 --- /dev/null +++ b/tests/unit/core/data/runtime-files/file2.json @@ -0,0 +1,3 @@ +{ + "foo1": "bar1" +} \ No newline at end of file diff --git a/tests/unit/core/data/tf-root-module/main.tf b/tests/unit/core/data/tf-root-module/main.tf new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/core/data/tf-root-module/my-net-change-1.tfvars b/tests/unit/core/data/tf-root-module/my-net-change-1.tfvars new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/core/data/tf-root-module/my-net-change-2.tfvars b/tests/unit/core/data/tf-root-module/my-net-change-2.tfvars new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/core/data/tf-root-module/my-var-1.tfvars b/tests/unit/core/data/tf-root-module/my-var-1.tfvars new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/core/data/tf-root-module/my-var-2.tfvars b/tests/unit/core/data/tf-root-module/my-var-2.tfvars new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/core/test_errors.py b/tests/unit/core/test_errors.py new file mode 100644 index 00000000..de52cc25 --- /dev/null +++ b/tests/unit/core/test_errors.py @@ -0,0 +1,40 @@ +import rdk.core.errors as rdk_errors + + +def test_errors_hierarchy(): + assert issubclass(rdk_errors.RdkError, Exception) + + assert issubclass(rdk_errors.RdkAwsAccountInvalidError, rdk_errors.RdkError) + assert issubclass(rdk_errors.RdkAwsRegionNotSetError, rdk_errors.RdkError) + assert issubclass(rdk_errors.RdkAwsS3GetObjectError, rdk_errors.RdkError) + assert issubclass(rdk_errors.RdkAwsS3UploadObjectError, rdk_errors.RdkError) + assert issubclass(rdk_errors.RdkAwsS3DeleteObjectError, rdk_errors.RdkError) + + assert issubclass(rdk_errors.RdkCommandInvokeError, rdk_errors.RdkError) + assert issubclass(rdk_errors.RdkCommandExecutionError, rdk_errors.RdkError) + assert issubclass(rdk_errors.RdkCommandNotAllowedError, rdk_errors.RdkError) + + assert issubclass(rdk_errors.RdkCustodianPolicyReadError, rdk_errors.RdkError) + assert issubclass( + rdk_errors.RdkCustodianUnsupportedModeError, rdk_errors.RdkError + ) + assert issubclass( + rdk_errors.RdkCustodianLambdaMonitorError, rdk_errors.RdkError + ) + assert issubclass(rdk_errors.RdkCustodianLambdaInvokeError, rdk_errors.RdkError) + + assert issubclass(rdk_errors.RdkMalformedPlanFile, rdk_errors.RdkError) + assert issubclass(rdk_errors.RdkPyTestFixtureInitError, rdk_errors.RdkError) + + assert issubclass(rdk_errors.RdkTerraformMalformedPlanData, rdk_errors.RdkError) + assert issubclass( + rdk_errors.RdkTerraformMalformedStateData, 
rdk_errors.RdkError + ) + assert issubclass( + rdk_errors.RdkTerraformAvenueDownloadError, rdk_errors.RdkError + ) + + assert issubclass( + rdk_errors.RdkReportUploadInvalidEnvironmentError, rdk_errors.RdkError + ) + assert issubclass(rdk_errors.RdkReportUploadS3Error, rdk_errors.RdkError) diff --git a/tests/unit/runners/__init__.py b/tests/unit/runners/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/runners/conftest.py b/tests/unit/runners/conftest.py new file mode 100644 index 00000000..fc4b27b7 --- /dev/null +++ b/tests/unit/runners/conftest.py @@ -0,0 +1,11 @@ +from unittest.mock import Mock + +import pytest +from pytest_mock import MockerFixture + + +@pytest.fixture +def run_cmd_mock( + mocker: MockerFixture, +) -> Mock: + return mocker.patch("rdk.runners.base.BaseRunner.run_cmd") diff --git a/tests/unit/runners/test_base.py b/tests/unit/runners/test_base.py new file mode 100644 index 00000000..feb584b2 --- /dev/null +++ b/tests/unit/runners/test_base.py @@ -0,0 +1,130 @@ +import logging +import subprocess +import sys +import uuid + +import pytest +from pytest_mock import MockerFixture + +from rdk.core.errors import ( + RdkCommandExecutionError, + RdkCommandInvokeError, + RdkCommandNotAllowedError, +) +from rdk.runners.base import BaseRunner + + +def test__check_if_command_is_allowed(): + runner = BaseRunner() + for cmd in [ + "cdk", + ]: + runner._check_if_command_is_allowed(cmd) + + with pytest.raises(RdkCommandNotAllowedError): + runner._check_if_command_is_allowed("foo") + + +def test_get_python_executable(monkeypatch: pytest.MonkeyPatch): + runner = BaseRunner() + with monkeypatch.context() as m: + m.setattr(sys, "executable", None) + assert runner.get_python_executable() == "python" + + +def test_run_cmd_basic(mocker: MockerFixture): + + # Init + runner = BaseRunner() + subprocess_popen_mock = mocker.patch("subprocess.Popen") + subprocess_popen_mock.return_value.__enter__().returncode = 0 + mocker.patch("selectors.DefaultSelector") + + # Test basic arguments pass-thru + runner.run_cmd( + cmd=["cdk", "--version"], + cwd="test", + env={ + "test": "test", + }, + ) + subprocess_popen_mock.assert_called_with( + args=["cdk", "--version"], + stdin=subprocess.DEVNULL, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + cwd="test", + env={ + "test": "test", + }, + ) + + # Test bad commands + subprocess_popen_mock.reset_mock(return_value=True, side_effect=True) + subprocess_popen_mock.side_effect = FileNotFoundError("File foo does not exist") + with pytest.raises(RdkCommandInvokeError): + runner.run_cmd(cmd=["cdk", "--version"],) + + # Test return codes + subprocess_popen_mock.reset_mock(return_value=True, side_effect=True) + subprocess_popen_mock.return_value.__enter__().returncode = 2 + with pytest.raises(RdkCommandExecutionError): + runner.run_cmd(cmd=["cdk", "--version"], allowed_return_codes=[1]) + + +def test_run_cmd_logging( + caplog: pytest.LogCaptureFixture, +): + this_python = sys.executable + + caplog.set_level(logging.DEBUG) + runner = BaseRunner() + + # Basic logs + runner.run_cmd( + cmd=[ + this_python, + "-c", + "import sys;print('hello');print('world',file=sys.stderr)", + ] + ) + assert "hello" in caplog.text + assert "world" in caplog.text + + # Capture output + response = runner.run_cmd( + cmd=[ + this_python, + "-c", + "import sys;print('hello')", + ], + capture_output=True, + ) + assert "hello" in caplog.text + assert response == "hello" + + # Discard output + response = runner.run_cmd( + cmd=[
this_python, + "-c", + "import sys;print('hello')", + ], + discard_output=True, + ) + assert "hello" in caplog.text + assert "hello" not in response + + # Discard output (with error) + with pytest.raises(RdkCommandExecutionError): + runner.run_cmd( + cmd=[ + this_python, + "-c", + "import sys;print('hello\\n');print('world\\n',file=sys.stderr);sys.exit(1)", + ], + discard_output=True, + ) + assert "hello" in caplog.text + assert "world" in caplog.text diff --git a/tests/unit/test_pkg_metadata.py b/tests/unit/test_pkg_metadata.py new file mode 100644 index 00000000..a6cd90bb --- /dev/null +++ b/tests/unit/test_pkg_metadata.py @@ -0,0 +1,18 @@ +import semver + +import rdk + + +def test_pkg_metadata(): + for pkg_metadata in [ + "NAME", + "VERSION", + "DESCRIPTION", + "MAINTAINER", + "MAINTAINER_EMAIL", + "URL", + ]: + assert hasattr(rdk, pkg_metadata) + assert getattr(rdk, pkg_metadata) is not None + + assert semver.VersionInfo.isvalid(getattr(rdk, "VERSION")) diff --git a/tests/unit/utils/__init__.py b/tests/unit/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/utils/conftest.py b/tests/unit/utils/conftest.py new file mode 100644 index 00000000..07efbe49 --- /dev/null +++ b/tests/unit/utils/conftest.py @@ -0,0 +1,8 @@ +from pathlib import Path + +import pytest + + +@pytest.fixture +def data_path() -> Path: + return (Path(__file__).parent / "data").resolve().absolute() diff --git a/tests/unit/utils/data/runtime-files/file1.json b/tests/unit/utils/data/runtime-files/file1.json new file mode 100644 index 00000000..8a796876 --- /dev/null +++ b/tests/unit/utils/data/runtime-files/file1.json @@ -0,0 +1,3 @@ +{ + "foo": "bar" +} \ No newline at end of file diff --git a/tests/unit/utils/data/runtime-files/file2.json b/tests/unit/utils/data/runtime-files/file2.json new file mode 100644 index 00000000..ee3dfe22 --- /dev/null +++ b/tests/unit/utils/data/runtime-files/file2.json @@ -0,0 +1,3 @@ +{ + "foo1": "bar1" +} \ No newline at end of file diff --git a/tests/unit/utils/test_logger.py b/tests/unit/utils/test_logger.py new file mode 100644 index 00000000..0edf9c83 --- /dev/null +++ b/tests/unit/utils/test_logger.py @@ -0,0 +1,180 @@ +import logging +import logging.handlers +import sys +from pathlib import Path +from typing import List, Optional + +import pytest +from colorlog import ColoredFormatter +from pytest_mock import MockerFixture + +import rdk.utils.logger as rdk_logger +from rdk import NAME as PKG_NAME + + +def test__fixup_friendly_name(): + assert rdk_logger._fixup_friendly_name("123") == "123 " + assert rdk_logger._fixup_friendly_name("12345678") == "12345678" + assert rdk_logger._fixup_friendly_name("1234567890") == "123456.." 
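The three assertions above pin down what _fixup_friendly_name is expected to do: leave an eight-character name untouched, right-pad shorter names to that width, and truncate longer names to six characters plus "..". A sketch consistent with those assertions (inferred from the test only, not the actual implementation in rdk/utils/logger.py):

    # Inferred from the assertions above; the real helper may differ.
    def _fixup_friendly_name(friendly_name: str, width: int = 8) -> str:
        if len(friendly_name) > width:
            return friendly_name[: width - 2] + ".."   # "1234567890" -> "123456.."
        return friendly_name.ljust(width)              # "123" -> "123     "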
+ + +def test__get_log_msg_format(): + assert len(rdk_logger._get_log_msg_format().split(" | ")) == 3 + + friendly_name = "xyz" + msgf = rdk_logger._get_log_msg_format(friendly_name=friendly_name) + assert len(msgf.split(" | ")) == 4 + assert "asctime" in msgf + assert "levelname" in msgf + assert "message" in msgf + assert friendly_name in msgf + + +def test__get_colorlog_msg_format(): + assert len(rdk_logger._get_colorlog_msg_format().split(" | ")) == 3 + + friendly_name = "xyz" + msgf = rdk_logger._get_colorlog_msg_format(friendly_name=friendly_name) + assert len(msgf.split(" | ")) == 4 + assert "asctime" in msgf + assert "levelname" in msgf + assert "message" in msgf + assert "log_color" in msgf + assert friendly_name in msgf + + +def test__do_colorlogs(monkeypatch: pytest.MonkeyPatch): + # NO_COLOR + with monkeypatch.context() as m: + m.setattr(sys.stderr, "isatty", lambda: True, raising=False) + m.setenv("NO_COLOR", "yes") + assert not rdk_logger._do_colorlogs() + + # TTY + with monkeypatch.context() as m: + m.setattr(sys.stderr, "isatty", lambda: False, raising=False) + m.delenv("NO_COLOR", raising=False) + assert not rdk_logger._do_colorlogs() + + with monkeypatch.context() as m: + m.setattr(sys.stderr, "isatty", ValueError("dummy"), raising=False) + m.delenv("NO_COLOR", raising=False) + assert not rdk_logger._do_colorlogs() + + # Enabled + with monkeypatch.context() as m: + m.setattr(sys.stderr, "isatty", lambda: True, raising=False) + m.delenv("NO_COLOR", raising=False) + assert rdk_logger._do_colorlogs() + + +def test__get_stream_handler(): + assert isinstance( + rdk_logger._get_stream_handler(friendly_name="xyz"), logging.StreamHandler + ) + + +def test_init_main_logger(): + logger = rdk_logger.init_main_logger() + assert isinstance(logger, logging.Logger) + assert logger.name.startswith(PKG_NAME) + assert "main" in logger.name + assert any(isinstance(x, logging.StreamHandler) for x in logger.handlers) + assert logger.level == logging.DEBUG + del logger + + +def test_init_testcase_logger(): + execution_id = "xyz" + logger = rdk_logger.init_testcase_logger(execution_id=execution_id) + assert isinstance(logger, logging.Logger) + assert logger.name.startswith(PKG_NAME) + assert "rdktest" in logger.name + assert execution_id in logger.name + assert any( + isinstance(x, logging.StreamHandler) and x.level == logging.INFO + for x in logger.handlers + ) + assert logger.level == logging.DEBUG + del logger + + +def test_add_file_handler(tmp_path: Path): + logger = rdk_logger.init_main_logger() + rdk_logger.add_file_handler(logger=logger, logfile_dir=tmp_path) + assert any( + isinstance(x, logging.handlers.RotatingFileHandler) for x in logger.handlers + ) + del logger + + +def test_update_stream_handler_level(): + logger = rdk_logger.init_main_logger() + assert any( + isinstance(x, logging.StreamHandler) and x.level == logging.INFO + for x in logger.handlers + ) + rdk_logger.update_stream_handler_level(logger=logger, level=logging.DEBUG) + assert any( + isinstance(x, logging.StreamHandler) and x.level == logging.DEBUG + for x in logger.handlers + ) + del logger + + +def test_get_testcase_logger(monkeypatch: pytest.MonkeyPatch): + + with monkeypatch.context() as m: + logger = rdk_logger.get_testcase_logger() + assert "unknown" in logger.name + del logger + + with monkeypatch.context() as m: + logger = rdk_logger.get_testcase_logger(execution_id="xyz") + assert "xyz" in logger.name + del logger + + with monkeypatch.context() as m: + logger = rdk_logger.get_testcase_logger() + assert "abc" 
in logger.name + del logger + + +def test_get_main_logger(): + logger = rdk_logger.get_main_logger() + assert "main" in logger.name + del logger + + +def test_logging_formatters_no_color( + mocker: MockerFixture, +): + # no color + mock1 = mocker.patch("rdk.utils.logger._do_colorlogs") + mock1.return_value = False + logger = rdk_logger.init_main_logger() + assert any( + isinstance(f, logging.Formatter) + for f in _get_logging_formatters(logger.handlers) + ) + + +def _get_logging_formatters(handlers: Optional[List[logging.Handler]]): + formatters = [] + if handlers is not None: + for h in handlers: + formatters.append(h.formatter) + return formatters + + +def test_logging_formatters_with_color( + mocker: MockerFixture, +): + # no color + mock1 = mocker.patch("rdk.utils.logger._do_colorlogs") + mock1.return_value = True + logger = rdk_logger.init_main_logger() + assert any( + isinstance(f, ColoredFormatter) + for f in _get_logging_formatters(logger.handlers) + ) diff --git a/tools/ci/bin/init-snapshot.sh b/tools/ci/bin/init-snapshot.sh new file mode 100755 index 00000000..802e05eb --- /dev/null +++ b/tools/ci/bin/init-snapshot.sh @@ -0,0 +1,83 @@ +#!/usr/bin/env bash + +################################################################################ +# MAKE HELPER TO CHECK WHETHER `make init` NEEDS TO RUN +################################################################################ + +# Bash Option +set -e +set -o pipefail +export IFS=$'\n' + +# err +function _kaput() { + echo "$@" >&2 + exit 1 +} + +# Skip if we are running on jenkins +# jenkins/bogie has a really old version of pipenv that +# is not init-snapshot compatible. +# Also, on jenkins, we're always starting with a fresh init anyways +if test -n "${BUILD_URL}" || [[ "${CI}" == "Jenkins" ]]; then + exit 0 +fi + +# Read operation +declare operation="check" +if [[ $# -gt 0 ]]; then + if [[ "${1}" == "save" ]]; then + operation="save" + fi +fi + +# get reporoot +declare reporoot +reporoot=$(git rev-parse --show-toplevel) \ + || _kaput "Failed to get repository root" + +# Things to include in snapshot +declare -a snapshot_things +for _file in \ + "${reporoot}/Makefile" \ + "${reporoot}/requirements.txt" \ + "${reporoot}/Pipfile.lock" \ + "${reporoot}/.python-version"; do + if test -f "${_file}"; then + snapshot_things+=("${_file}") + fi +done +while IFS= read -r -d '' _file; do + snapshot_things+=("${_file}") +done < <(find "${reporoot}/tools/githooks" -type f -print0) + +# Calculate snapshot +declare snapshot +snapshot=$( + cat "${snapshot_things[@]}" \ + | openssl dgst -sha256 +) || _kaput "ERROR: Failed to calculate snapshot" + +# snapshot location +declare snapshot_root="${reporoot}/.venv" +declare snapshot_file="${snapshot_root}/init-snapshot" + +# create/save +if [[ "${operation}" == "save" ]]; then + # create + mkdir -p "${snapshot_root}" \ + && echo "${snapshot}" > "${snapshot_file}" +else + # check + test -f "${snapshot_file}" || exit 2 + if [[ $(head -1 "${snapshot_file}") == "${snapshot}" ]]; then + exit 0 + else + exit 2 + fi +fi + +# Done +exit 0 + +################################################################################ diff --git a/tools/ci/bin/install-tools-on-jenkins.sh b/tools/ci/bin/install-tools-on-jenkins.sh new file mode 100644 index 00000000..19eccbd0 --- /dev/null +++ b/tools/ci/bin/install-tools-on-jenkins.sh @@ -0,0 +1,49 @@ +#!/usr/bin/env bash + +################################################################################ +# INSTALL TOOLS NEEDED FOR JENKINS JOBS 
+################################################################################ + +# Bash Options +set -e +set -o pipefail +export IFS=$'\n' + +# err +function _kaput() { + echo "$@" >&2 + exit 1 +} + +# Ensure we're running on Jenkins +if test -z "${BUILD_URL}"; then + _kaput "ERROR: This script should only run within a Jenkins job" +fi + +# Setup ~/.local +declare home_local="/home/git/.local" +if test -e "${home_local}"; then + chmod -R 0755 "${home_local}" + rm -rf "${home_local}" +fi +mkdir -p "${home_local}/bin" "${home_local}/lib" + +# Install pipenv +declare PYENV_VERSION +PYENV_VERSION=$(pyenv versions --bare | grep 3.8 | sort -n | tail -1) +export PYENV_VERSION +pip3 install \ + --index-url https://artifactory.cloud.example.com/artifactory/api/pypi/pypi-internalfacing/simple \ + --upgrade \ + --ignore-installed \ + --user \ + -- pipenv + +# Verify pipenv +hash -r +"${home_local}/bin/pipenv" --version + +# Done +exit 0 + +################################################################################ diff --git a/tools/docs/bin/generate-ref-cli.sh b/tools/docs/bin/generate-ref-cli.sh new file mode 100644 index 00000000..7248dea7 --- /dev/null +++ b/tools/docs/bin/generate-ref-cli.sh @@ -0,0 +1,54 @@ +#!/usr/bin/env bash + +################################################################################ +# GENERATE CLI REFERENCE +################################################################################ + +# Bash Options +set -e +set -o pipefail + +declare content_md="docs/reference/cli.md" + +# Init +echo > "${content_md}" + +# rdk +cat << '_EO_SECTION_START' >> "${content_md}" +## `rdk` + +```text +_EO_SECTION_START +rdk --help >> "${content_md}" 2>&1 +cat << '_EO_SECTION_END' >> "${content_md}" +``` + +_EO_SECTION_END + +# rdk-init +cat << '_EO_SECTION_START' >> "${content_md}" +## `rdk init` + +```text +_EO_SECTION_START +rdk init --help >> "${content_md}" 2>&1 +cat << '_EO_SECTION_END' >> "${content_md}" +``` + +_EO_SECTION_END + +# rdk-deploy +cat << '_EO_SECTION_START' >> "${content_md}" +## `rdk deploy` + +```text +_EO_SECTION_START +rdk test --help >> "${content_md}" 2>&1 +cat << '_EO_SECTION_END' >> "${content_md}" +``` +_EO_SECTION_END + +# Done +exit 0 + +################################################################################ diff --git a/tools/githooks/bin/commit-msg b/tools/githooks/bin/commit-msg new file mode 100755 index 00000000..d7ed122f --- /dev/null +++ b/tools/githooks/bin/commit-msg @@ -0,0 +1,41 @@ +#!/usr/bin/env bash + +############################################################################### +# commit-msg HOOK +############################################################################### + +# +# Based on: +# https://github.com/git/git/blob/master/templates/hooks--commit-msg.sample +# + +# Bash Options +set -eu +set -o pipefail + +# Read Arguments +declare commit_msg_file="${1}" + +# Do we have pipenv? +command -v "pipenv" > /dev/null 2>&1 \ + || exit 0 + +# Lint message +if ! pipenv run -- gitlint --staged --msg-filename "${commit_msg_file}"; then + cat << 'EO_BAD_COMMIT_MSG' + +The commit message provided is not compliant with Conventional Commits. + +For more information on writing commit messages, please review: +https://www.conventionalcommits.org/ + +To bypass this verification intentionally, you can use the `--no-verify` +option. 
For example, `git commit --no-verify -m "some commit message"` +EO_BAD_COMMIT_MSG + exit 1 +fi + +# Done +exit 0 + +############################################################################### diff --git a/tools/githooks/bin/pre-push b/tools/githooks/bin/pre-push new file mode 100755 index 00000000..4ab8ed09 --- /dev/null +++ b/tools/githooks/bin/pre-push @@ -0,0 +1,77 @@ +#!/usr/bin/env bash + +############################################################################### +# pre-push HOOK +############################################################################### + +# +# Based on: +# https://github.com/git/git/blob/master/templates/hooks--pre-push.sample +# + +# Bash Options +set -eu +set -o pipefail + +# Read arguments +declare remote_name="${1}" +declare remote_url="${2}" + +# Zero hash +declare zero +zero=$( + git hash-object --stdin < /dev/null \ + | tr "0-9a-z" "0" +) || exit 0 + +# Parse STDIN +while read -r local_ref local_oid remote_ref remote_oid; do + if [[ "${local_oid}" = "${zero}" ]]; then + # Handle delete + # Nothing to do + continue + fi + + # get commit range + range="" + + if [[ "${remote_oid}" = "${zero}" ]]; then + # New branch, examine all new commits + range="${remote_name}..${local_oid}" + else + # Update to existing branch, examine new commits + range="${remote_oid}..${local_oid}" + fi + + # lint commits + if [[ "${range}" != "" ]]; then + if command -v "pipenv" > /dev/null 2>&1; then + if ! pipenv run -- gitlint --ignore-stdin --commits "${range}"; then + + cat << 'EO_BAD_COMMIT_MSG' + +Commits being pushed are not compliant with Conventional Commits. + +For more information on writing commit messages, please review: +https://www.conventionalcommits.org/ + +To fix your commit messages, you can do one of the following: +* Use `git commit --amend` if you have only one commit to fix +* Use `git rebase -i HEAD~n` to fix the last n commits. In the + interactive rebase session, you can choose `edit` to fix each + non-compliant commit. + +To bypass this verification intentionally, you can use the `--no-verify` +option. For example, `git push --no-verify` +EO_BAD_COMMIT_MSG + + exit 1 + fi + fi + fi +done + +# Done +exit 0 + +############################################################################### diff --git a/tools/githooks/etc/commit-template b/tools/githooks/etc/commit-template new file mode 100644 index 00000000..2173b593 --- /dev/null +++ b/tools/githooks/etc/commit-template @@ -0,0 +1,32 @@ + +# This repository requires that commit messages adhere to +# Conventional Commits (https://www.conventionalcommits.org/) + +# The general format of a commit message is: + +# <type>[optional scope]: <description> + +# [optional body] + +# [optional footer(s)] + +# For example: + +# feat: my shiny new feature + +# Lots of details here about my new feature + +# Refs: FOUNDRY-1234 + +# List of commonly used _types_ and their descriptions: + +# fix: A bugfix +# feat: A new feature +# chore: Routine maintenance +# docs: Documentation updates +# style: Formatting changes +# refactor: API Refactor.
This indicates a breaking change +# test: Updates to test cases +# revert: Previous commits being reverted +# ci: Changes to CI configurations +# build: Release activity diff --git a/twine.pypirc b/twine.pypirc new file mode 100644 index 00000000..4f3ddb01 --- /dev/null +++ b/twine.pypirc @@ -0,0 +1,17 @@ +############################################################################### +# .pypirc USED BY twine +############################################################################### + +# +# Reference: +# https://packaging.python.org/specifications/pypirc/ +# https://www.jfrog.com/confluence/display/JFROG/PyPI+Repositories#PyPIRepositories-PublishingtoArtifactory +# + +[distutils] +index-servers = artifactory + +[artifactory] +repository = https://artifactory.cloud.example.com/artifactory/api/pypi/pypi-internalfacing + +############################################################################### From b2f6074f3e5487d67fe35a1ac9c97636361a2f73 Mon Sep 17 00:00:00 2001 From: Ricky Chau Date: Wed, 5 Apr 2023 17:57:53 -0400 Subject: [PATCH 02/23] feat(cdk-int): add cdk deploy steps --- rdk/core/rules_deploy.py | 6 +- {cdk => rdk/frameworks/cdk}/.gitignore | 0 {cdk => rdk/frameworks/cdk}/README.md | 0 {cdk => rdk/frameworks/cdk}/app.py | 0 {cdk => rdk/frameworks/cdk}/cdk.json | 0 {cdk => rdk/frameworks/cdk}/cdk/__init__.py | 0 {cdk => rdk/frameworks/cdk}/cdk/cdk_stack.py | 2 +- .../frameworks/cdk}/requirements-dev.txt | 0 {cdk => rdk/frameworks/cdk}/requirements.txt | 0 {cdk => rdk/frameworks/cdk}/source.bat | 0 {cdk => rdk/frameworks/cdk}/tests/__init__.py | 0 .../frameworks/cdk}/tests/unit/__init__.py | 0 .../cdk}/tests/unit/test_cdk_stack.py | 0 rdk/runners/cdk.py | 56 +++++++++++++++++-- 14 files changed, 56 insertions(+), 8 deletions(-) rename {cdk => rdk/frameworks/cdk}/.gitignore (100%) rename {cdk => rdk/frameworks/cdk}/README.md (100%) rename {cdk => rdk/frameworks/cdk}/app.py (100%) rename {cdk => rdk/frameworks/cdk}/cdk.json (100%) rename {cdk => rdk/frameworks/cdk}/cdk/__init__.py (100%) rename {cdk => rdk/frameworks/cdk}/cdk/cdk_stack.py (97%) rename {cdk => rdk/frameworks/cdk}/requirements-dev.txt (100%) rename {cdk => rdk/frameworks/cdk}/requirements.txt (100%) rename {cdk => rdk/frameworks/cdk}/source.bat (100%) rename {cdk => rdk/frameworks/cdk}/tests/__init__.py (100%) rename {cdk => rdk/frameworks/cdk}/tests/unit/__init__.py (100%) rename {cdk => rdk/frameworks/cdk}/tests/unit/test_cdk_stack.py (100%) diff --git a/rdk/core/rules_deploy.py b/rdk/core/rules_deploy.py index 6892d540..0fd4d165 100644 --- a/rdk/core/rules_deploy.py +++ b/rdk/core/rules_deploy.py @@ -46,8 +46,10 @@ def run(self): rules_dir = Path(self.rulenames[0]) cdk_runner = CdkRunner( - root_module=Path("./cdk"), + root_module=Path().absolute() , rules_dir=rules_dir ) - cdk_runner.synthesize() \ No newline at end of file + cdk_runner.synthesize() + cdk_runner.bootstrap() + cdk_runner.deploy() \ No newline at end of file diff --git a/cdk/.gitignore b/rdk/frameworks/cdk/.gitignore similarity index 100% rename from cdk/.gitignore rename to rdk/frameworks/cdk/.gitignore diff --git a/cdk/README.md b/rdk/frameworks/cdk/README.md similarity index 100% rename from cdk/README.md rename to rdk/frameworks/cdk/README.md diff --git a/cdk/app.py b/rdk/frameworks/cdk/app.py similarity index 100% rename from cdk/app.py rename to rdk/frameworks/cdk/app.py diff --git a/cdk/cdk.json b/rdk/frameworks/cdk/cdk.json similarity index 100% rename from cdk/cdk.json rename to rdk/frameworks/cdk/cdk.json diff --git 
a/cdk/cdk/__init__.py b/rdk/frameworks/cdk/cdk/__init__.py similarity index 100% rename from cdk/cdk/__init__.py rename to rdk/frameworks/cdk/cdk/__init__.py diff --git a/cdk/cdk/cdk_stack.py b/rdk/frameworks/cdk/cdk/cdk_stack.py similarity index 97% rename from cdk/cdk/cdk_stack.py rename to rdk/frameworks/cdk/cdk/cdk_stack.py index f5a5d092..c1ec6619 100644 --- a/cdk/cdk/cdk_stack.py +++ b/rdk/frameworks/cdk/cdk/cdk_stack.py @@ -20,7 +20,7 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: # visibility_timeout=Duration.seconds(300), # ) rule_name = "MyRuleCFNGuard" - rule_dir = "rdk_rules" + rule_dir = "../" sample_policy_text = Path(f'{rule_dir}/{rule_name}/rule_code.guard').read_text() # sample_policy_text = """ diff --git a/cdk/requirements-dev.txt b/rdk/frameworks/cdk/requirements-dev.txt similarity index 100% rename from cdk/requirements-dev.txt rename to rdk/frameworks/cdk/requirements-dev.txt diff --git a/cdk/requirements.txt b/rdk/frameworks/cdk/requirements.txt similarity index 100% rename from cdk/requirements.txt rename to rdk/frameworks/cdk/requirements.txt diff --git a/cdk/source.bat b/rdk/frameworks/cdk/source.bat similarity index 100% rename from cdk/source.bat rename to rdk/frameworks/cdk/source.bat diff --git a/cdk/tests/__init__.py b/rdk/frameworks/cdk/tests/__init__.py similarity index 100% rename from cdk/tests/__init__.py rename to rdk/frameworks/cdk/tests/__init__.py diff --git a/cdk/tests/unit/__init__.py b/rdk/frameworks/cdk/tests/unit/__init__.py similarity index 100% rename from cdk/tests/unit/__init__.py rename to rdk/frameworks/cdk/tests/unit/__init__.py diff --git a/cdk/tests/unit/test_cdk_stack.py b/rdk/frameworks/cdk/tests/unit/test_cdk_stack.py similarity index 100% rename from cdk/tests/unit/test_cdk_stack.py rename to rdk/frameworks/cdk/tests/unit/test_cdk_stack.py diff --git a/rdk/runners/cdk.py b/rdk/runners/cdk.py index 10376811..2331e1bc 100644 --- a/rdk/runners/cdk.py +++ b/rdk/runners/cdk.py @@ -3,6 +3,7 @@ import os from dataclasses import dataclass, field from pathlib import Path +import shutil from typing import Any, Dict, List, Optional import rdk as this_pkg @@ -24,10 +25,16 @@ class CdkRunner(BaseRunner): root_module: Path rules_dir: Path + cdk_app_dir: Path = field(init=False) def __post_init__(self): super().__post_init__() - + cdk_source_dir = Path(__file__).resolve().parent.parent /'frameworks' / 'cdk' + self.logger.info("Getting latest deployment framework from " + cdk_source_dir.as_posix()) + self.logger.info("Deploying latest deployment framework in " + self.root_module.as_posix()) + shutil.rmtree(self.root_module / "cdk") + shutil.copytree(Path(__file__).resolve().parent.parent /'frameworks' / 'cdk', self.root_module / 'cdk') + self.cdk_app_dir = self.root_module / "cdk" def synthesize(self): """ @@ -41,13 +48,52 @@ def synthesize(self): ] - self.logger.info("Synthsizing CloudFormation template(s)...") - self.logger.info(self.root_module.as_posix()) - self.logger.info(self.rules_dir) + self.logger.info("Synthesizing CloudFormation template(s)...") + + self.run_cmd( + cmd=cmd, + cwd=self.cdk_app_dir.as_posix(), + allowed_return_codes=[0, 2], + ) + + def bootstrap(self): + """ + Executes `cdk bootstrap`. + + Parameters: + """ + cmd = [ + "cdk", + "bootstrap" + ] + + self.logger.info("CDK Envrionment Bootstrapping ...") self.run_cmd( cmd=cmd, - cwd=self.root_module.as_posix(), + cwd=self.cdk_app_dir.as_posix(), allowed_return_codes=[0, 2], ) + + def deploy(self): + """ + Executes `cdk deploy`. 
+ + Parameters: + """ + cmd = [ + "cdk", + "deploy", + "--require-approval", + "never" + ] + + + self.logger.info("Deploying AWS Config Rules ...") + + self.run_cmd( + cmd=cmd, + cwd=self.cdk_app_dir.as_posix(), + allowed_return_codes=[0, 2], + ) \ No newline at end of file From 53184110a7d4f7d96996b990ca8cc63a4ce5bd8b Mon Sep 17 00:00:00 2001 From: Ricky Chau Date: Wed, 5 Apr 2023 18:28:36 -0400 Subject: [PATCH 03/23] fix(cdk-int): fix paths --- rdk/frameworks/cdk/cdk/cdk_stack.py | 7 ++++--- rdk/runners/cdk.py | 25 ++++++++++++++++--------- 2 files changed, 20 insertions(+), 12 deletions(-) diff --git a/rdk/frameworks/cdk/cdk/cdk_stack.py b/rdk/frameworks/cdk/cdk/cdk_stack.py index c1ec6619..6e5b7733 100644 --- a/rdk/frameworks/cdk/cdk/cdk_stack.py +++ b/rdk/frameworks/cdk/cdk/cdk_stack.py @@ -19,9 +19,10 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: # self, "RdkCdkQueue", # visibility_timeout=Duration.seconds(300), # ) - rule_name = "MyRuleCFNGuard" - rule_dir = "../" - sample_policy_text = Path(f'{rule_dir}/{rule_name}/rule_code.guard').read_text() + # rule_name = "MyRuleCFNGuard" + # rule_dir = Path().absolute() + # sample_policy_text = Path(f'{rule_dir}/{rule_name}/rule_code.guard').read_text() + sample_policy_text = Path(self.node.try_get_context("rules_dir")).joinpath("rule_code.guard").read_text() # sample_policy_text = """ # rule checkcompliance when diff --git a/rdk/runners/cdk.py b/rdk/runners/cdk.py index 2331e1bc..38906671 100644 --- a/rdk/runners/cdk.py +++ b/rdk/runners/cdk.py @@ -29,12 +29,13 @@ class CdkRunner(BaseRunner): def __post_init__(self): super().__post_init__() - cdk_source_dir = Path(__file__).resolve().parent.parent /'frameworks' / 'cdk' - self.logger.info("Getting latest deployment framework from " + cdk_source_dir.as_posix()) - self.logger.info("Deploying latest deployment framework in " + self.root_module.as_posix()) - shutil.rmtree(self.root_module / "cdk") - shutil.copytree(Path(__file__).resolve().parent.parent /'frameworks' / 'cdk', self.root_module / 'cdk') - self.cdk_app_dir = self.root_module / "cdk" + # cdk_source_dir = Path(__file__).resolve().parent.parent /'frameworks' / 'cdk' + # self.logger.info("Getting latest deployment framework from " + cdk_source_dir.as_posix()) + # self.logger.info("Deploying latest deployment framework in " + self.root_module.as_posix()) + # shutil.rmtree(self.root_module / "cdk") + # shutil.copytree(Path(__file__).resolve().parent.parent /'frameworks' / 'cdk', self.root_module / 'cdk') + # self.cdk_app_dir = self.root_module / "cdk" + self.cdk_app_dir = Path(__file__).resolve().parent.parent /'frameworks' / 'cdk' def synthesize(self): """ @@ -44,7 +45,9 @@ def synthesize(self): """ cmd = [ "cdk", - "synth" + "synth", + "--context", + "rules_dir=" + Path().absolute().as_posix() + "/" + "MyRuleCFNGuard" ] @@ -64,11 +67,13 @@ def bootstrap(self): """ cmd = [ "cdk", - "bootstrap" + "bootstrap", + "--context", + "rules_dir=" + Path().absolute().as_posix() + "/" + "MyRuleCFNGuard" ] - self.logger.info("CDK Envrionment Bootstrapping ...") + self.logger.info("Envrionment Bootstrapping ...") self.run_cmd( cmd=cmd, @@ -85,6 +90,8 @@ def deploy(self): cmd = [ "cdk", "deploy", + "--context", + "rules_dir=" + Path().absolute().as_posix() + "/" + "MyRuleCFNGuard", "--require-approval", "never" ] From 5bb3f4211e162547040fb12c67d9d975506229c8 Mon Sep 17 00:00:00 2001 From: Ricky Chau Date: Fri, 7 Apr 2023 19:13:34 -0400 Subject: [PATCH 04/23] feat(managed-rule): add managed rules --- rdk/core/errors.py | 
110 +----------------- rdk/core/rules_deploy.py | 22 +--- rdk/frameworks/cdk/cdk/cdk_stack.py | 97 ++++++++++----- rdk/frameworks/cdk/cdk/core/errors.py | 58 +++++++++ .../cdk/cdk/core/rule_parameters.py | 64 ++++++++++ rdk/runners/cdk.py | 7 +- 6 files changed, 202 insertions(+), 156 deletions(-) create mode 100644 rdk/frameworks/cdk/cdk/core/errors.py create mode 100644 rdk/frameworks/cdk/cdk/core/rule_parameters.py diff --git a/rdk/core/errors.py b/rdk/core/errors.py index a9bf7dac..468d0ab5 100644 --- a/rdk/core/errors.py +++ b/rdk/core/errors.py @@ -9,36 +9,6 @@ class RdkError(Exception): """ -class RdkAwsAccountInvalidError(RdkError): - """ - Current set of AWS Credentials belongs to an unsupported AWS Account. - """ - - -class RdkAwsRegionNotSetError(RdkError): - """ - Current AWS Region was not determined. - """ - - -class RdkAwsS3GetObjectError(RdkError): - """ - Error occured when fetching from S3. - """ - - -class RdkAwsS3UploadObjectError(RdkError): - """ - Error occured when uploading to S3. - """ - - -class RdkAwsS3DeleteObjectError(RdkError): - """ - Error occured when deleting an S3 object. - """ - - class RdkCommandInvokeError(RdkError): """ Error occured when invoking a command. @@ -54,82 +24,4 @@ class RdkCommandExecutionError(RdkError): class RdkCommandNotAllowedError(RdkError): """ An unsupported command was requested to be executed. - """ - - -class RdkCustodianPolicyReadError(RdkError): - """ - Error reading a custodian policy. - """ - - -class RdkCustodianUnsupportedModeError(RdkError): - """ - Custodian policy is using an unsupported mode. - """ - - -class RdkCustodianLambdaMonitorError(RdkError): - """ - Error when monitoring Custodian-managed Lambda Functions. - """ - - -class RdkCustodianActionWaiterError(RdkError): - """ - Error when implementing wait for custodian actions. - """ - - -class RdkCustodianLambdaInvokeError(RdkError): - """ - Error when invoking Custodian-managed Lambda Functions. - """ - - -class RdkMalformedPlanFile(RdkError): - """ - Malformed Rdk Test Plan File. - """ - - -class RdkPyTestFixtureInitError(RdkError): - """ - Error initializing RdkPyTestFixture. - """ - - -class RdkTestExecutionError(RdkError): - """ - Error while executing Rdk test case. - """ - - -class RdkTerraformMalformedPlanData(RdkError): - """ - Malformed Terraform JSON Plan-Representation. - """ - - -class RdkTerraformMalformedStateData(RdkError): - """ - Malformed Terraform JSON State-Representation. - """ - - -class RdkTerraformAvenueDownloadError(RdkError): - """ - Error downloading terraform-avenue provider. - """ - - -class RdkReportUploadS3Error(RdkError): - """ - Error uploading a test report to S3. - """ - - -class RdkReportUploadInvalidEnvironmentError(RdkError): - """ - Invalid Report Upload Environment. - """ + """ \ No newline at end of file diff --git a/rdk/core/rules_deploy.py b/rdk/core/rules_deploy.py index 0fd4d165..7508e6a8 100644 --- a/rdk/core/rules_deploy.py +++ b/rdk/core/rules_deploy.py @@ -7,18 +7,6 @@ import rdk.utils.logger as rdk_logger from rdk.runners.cdk import CdkRunner -def _resolve_path( - root: Path, - thing: Union[str, Path], -) -> Path: - """ - Helper to resolve and verify paths. 
- """ - resolved = (root / thing).resolve().absolute() - if not resolved.exists(): - raise FileNotFoundError(resolved.as_posix()) - return resolved - @dataclass class RulesDeploy: """ @@ -43,13 +31,15 @@ def run(self): Runs Rules Deployment """ - rules_dir = Path(self.rulenames[0]) + if len(self.rulenames) > 0: + rules_dir = Path(self.rulenames[0]) + else: + rules_dir=Path().absolute() cdk_runner = CdkRunner( - root_module=Path().absolute() , rules_dir=rules_dir ) cdk_runner.synthesize() - cdk_runner.bootstrap() - cdk_runner.deploy() \ No newline at end of file + # cdk_runner.bootstrap() + # cdk_runner.deploy() \ No newline at end of file diff --git a/rdk/frameworks/cdk/cdk/cdk_stack.py b/rdk/frameworks/cdk/cdk/cdk_stack.py index 6e5b7733..8a6af557 100644 --- a/rdk/frameworks/cdk/cdk/cdk_stack.py +++ b/rdk/frameworks/cdk/cdk/cdk_stack.py @@ -1,38 +1,81 @@ from aws_cdk import ( - # Duration, Stack, - aws_config as config - # aws_sqs as sqs, + aws_config as config, + aws_lambda as lambda_, ) from constructs import Construct from pathlib import Path +from .core.rule_parameters import get_rule_parameters, get_deploy_rules_list, get_rule_name, rdk_supported_custom_rule_runtime +from .core.errors import RdkRuleTypesInvalidError, RdkParametersInvalidError +import json class CdkStack(Stack): def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: super().__init__(scope, construct_id, **kwargs) - # The code that defines your stack goes here - - # example resource - # queue = sqs.Queue( - # self, "RdkCdkQueue", - # visibility_timeout=Duration.seconds(300), - # ) - # rule_name = "MyRuleCFNGuard" - # rule_dir = Path().absolute() - # sample_policy_text = Path(f'{rule_dir}/{rule_name}/rule_code.guard').read_text() - sample_policy_text = Path(self.node.try_get_context("rules_dir")).joinpath("rule_code.guard").read_text() - - # sample_policy_text = """ - # rule checkcompliance when - # resourceType IN ['AWS::SNS::Topic'] { - # awsRegion == "us-east-1" - # } - # """ - - config.CustomPolicy(self, "CustomSnsPolicy", - policy_text=sample_policy_text, - enable_debug_log=True, - rule_scope=config.RuleScope.from_resources([config.ResourceType.SNS_TOPIC]) - ) \ No newline at end of file + rules_dir = Path(self.node.try_get_context("rules_dir")) + rules_list = get_deploy_rules_list(rules_dir) + + for rule_path in rules_list: + rule_name = get_rule_name(rule_path) + rule_parameters = get_rule_parameters(rule_path) + + if rule_parameters["Parameters"]["SourceRuntime"] == "cloudformation-guard2.0": + policy_text = rule_path.joinpath("rule_code.guard").read_text() + + try: + source_events = getattr(config.ResourceType, rule_parameters["Parameters"]["SourceEvents"].upper().replace("AWS::", "").replace("::", "_")) + except: + raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceEvents. Please review https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html") + + config.CustomPolicy(self, rule_name, + policy_text=policy_text, + enable_debug_log=True, + rule_scope=config.RuleScope.from_resources([source_events]) + ) + elif rule_parameters["Parameters"]["SourceIdentifier"]: + try: + source_identifier = getattr(config.ManagedRuleIdentifiers, rule_parameters["Parameters"]["SourceIdentifier"].upper().replace("-", "_")) + except: + raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceIdentifier. 
Please review https://docs.aws.amazon.com/config/latest/developerguide/managed-rules-by-aws-config.html") + + try: + source_events = getattr(config.ResourceType, rule_parameters["Parameters"]["SourceEvents"].upper().replace("AWS::", "").replace("::", "_")) + except: + raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceEvents. Please review https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html") + + config.ManagedRule(self, rule_name, + identifier=source_identifier, + input_parameters={ + "max_access_key_age": 60 + }, + rule_scope=config.RuleScope.from_resources([source_events]) + ) + # elif rule_parameters["Parameters"]["SourceRuntime"] in rdk_supported_custom_rule_runtime: + # # Lambda function containing logic that evaluates compliance with the rule. + # eval_compliance_fn = lambda_.Function(self, "CustomFunction", + # code=lambda_.Code.asset(Path(self.node.try_get_context("rules_dir"))), + # handler="index.handler", + # runtime=lambda_.Runtime.NODEJS_14_X + # ) + + # # A custom rule that runs on configuration changes of EC2 instances + # config.CustomRule(self, "Custom", + # configuration_changes=True, + # lambda_function=eval_compliance_fn, + # rule_scope=config.RuleScope.from_resource(config.ResourceType.EC2_INSTANCE) + # ) + else: + raise RdkRuleTypesInvalidError(f"Error loading parameters file for Rule {rule_name}") + + # # A rule to detect stack drifts + # drift_rule = config.CloudFormationStackDriftDetectionCheck(self, "Drift") + + # # Topic to which compliance notification events will be published + # compliance_topic = sns.Topic(self, "ComplianceTopic") + + # # Send notification on compliance change events + # drift_rule.on_compliance_change("ComplianceChange", + # target=targets.SnsTopic(compliance_topic) + # ) \ No newline at end of file diff --git a/rdk/frameworks/cdk/cdk/core/errors.py b/rdk/frameworks/cdk/cdk/core/errors.py new file mode 100644 index 00000000..95f14c7d --- /dev/null +++ b/rdk/frameworks/cdk/cdk/core/errors.py @@ -0,0 +1,58 @@ +""" +Well-known exceptions raised. +""" + + +class RdkParametersInvalidError(Exception): + """ + Raise invalid parameters error when rdk failed to retrieve the parameters from parameters.json + """ + def __init__(self, rule_dir): + message = ( + f"Invalid parameters found in {rule_dir}" + ) + + super().__init__(message) + +class RdkJsonInvalidError(Exception): + """ + Raise invalid json error when rdk failed to decode parameters.json + """ + def __init__(self, rule_dir): + message = ( + f"Failed to decode JSON in parameters file in {rule_dir}" + ) + + super().__init__(message) + +class RdkJsonLoadFailure(Exception): + """ + Raise load failure exception when rdk failed to load parameters.json + """ + def __init__(self, rule_dir): + message = ( + f"Error loading parameters file in {rule_dir}" + ) + + super().__init__(message) + +class RdkRuleTypesInvalidError(Exception): + """ + Raise invalid source type error for non supporting types. + """ + +class RdkNotSupportedError(Exception): + """ + Raise not supporting error for not supported action. + """ + +class RdkDuplicatedRuleNameError(Exception): + """ + Raise invalid source type error for non supporting types. 
+ """ + def __init__(self, rule_paths): + message = ( + f"Found duplicated rule name in the following paths: {rule_paths}" + ) + + super().__init__(message) diff --git a/rdk/frameworks/cdk/cdk/core/rule_parameters.py b/rdk/frameworks/cdk/cdk/core/rule_parameters.py new file mode 100644 index 00000000..40ff5c55 --- /dev/null +++ b/rdk/frameworks/cdk/cdk/core/rule_parameters.py @@ -0,0 +1,64 @@ +from aws_cdk import Stack +from pathlib import Path +from .errors import RdkJsonInvalidError, RdkJsonLoadFailure, RdkDuplicatedRuleNameError, RdkParametersInvalidError, RdkNotSupportedError +import json + +rdk_supported_custom_rule_runtime = [ + "python3.7", + "python3.7-lib", + "python3.8", + "python3.8-lib", + "python3.9", + "python3.9-lib", + "nodejs6.10", + "nodejs8.10", + ] + +def get_rule_parameters(rule_dir: Path): + parameters_txt = rule_dir.joinpath("parameters.json").read_text() + parameters_json = {} + + try: + parameters_json = json.loads(parameters_txt) + except ValueError as ve: + raise RdkJsonInvalidError(rule_dir) + except Exception as e: + raise RdkJsonLoadFailure(rule_dir) + + return validate(parameters_json) + +def get_rule_name(rule_path: Path): + try: + rule_name = get_rule_parameters(rule_path)["Parameters"]["RuleName"] + except Exception as e: + raise RdkParametersInvalidError("Invalid parameters found in Parameters.RuleName") + if len(rule_name) > 128: + raise RdkParametersInvalidError("Error: Found Rule with name over 128 characters: {rule_name} \n Recreate the Rule with a shorter name.") + + return rule_name + +def get_deploy_rules_list(rules_dir: Path, deployment_mode: str = "all",): + deploy_rules_list = [] + print(rules_dir.absolute()) + for path in rules_dir.absolute().glob("**/parameters.json"): + print(path) + if rules_dir.absolute().joinpath("rdk").as_posix() not in path.as_posix(): + if deployment_mode == "all": + deploy_rules_list.append(path.parent) + # Add support for java and cs + # elif deployment_mode == "rule_names": + # for path in rules_dir.absolute().glob("**/parameters.json"): + # if rules_dir.absolute().joinpath("rdk").as_posix() not in path.as_posix(): + # if command_arg == get_rule_name(path.parent): + # rule_dir_paths.append(path.parent.as_posix()) + # if len(rule_dir_paths) > 1: + # raise RdkDuplicatedRuleNameError(rule_dir_paths) + else: + raise RdkNotSupportedError('Invalid Option: Specify Rule Name or RuleSet or empty for all.') + + print(deploy_rules_list) + return deploy_rules_list + +def validate(parameters_json: dict): + #TODO + return parameters_json \ No newline at end of file diff --git a/rdk/runners/cdk.py b/rdk/runners/cdk.py index 38906671..7210dc23 100644 --- a/rdk/runners/cdk.py +++ b/rdk/runners/cdk.py @@ -23,7 +23,6 @@ class CdkRunner(BaseRunner): """ - root_module: Path rules_dir: Path cdk_app_dir: Path = field(init=False) @@ -47,7 +46,7 @@ def synthesize(self): "cdk", "synth", "--context", - "rules_dir=" + Path().absolute().as_posix() + "/" + "MyRuleCFNGuard" + "rules_dir=" + self.rules_dir.as_posix() ] @@ -69,7 +68,7 @@ def bootstrap(self): "cdk", "bootstrap", "--context", - "rules_dir=" + Path().absolute().as_posix() + "/" + "MyRuleCFNGuard" + "rules_dir=" + self.rules_dir.as_posix() ] @@ -91,7 +90,7 @@ def deploy(self): "cdk", "deploy", "--context", - "rules_dir=" + Path().absolute().as_posix() + "/" + "MyRuleCFNGuard", + "rules_dir=" + self.rules_dir.as_posix(), "--require-approval", "never" ] From 12312476b9816f03a7b35407f774ea9c1431729c Mon Sep 17 00:00:00 2001 From: Ricky Chau Date: Tue, 11 Apr 2023 18:40:33 -0400 Subject: 
[PATCH 05/23] feat(test): add rdk test comand --- Makefile | 2 +- rdk/cli/commands/test.py | 17 +++ rdk/cli/main.py | 34 ++++- rdk/core/rules_test.py | 118 ++++++++++++++++++ rdk/frameworks/cdk/cdk/cdk_stack.py | 36 ++---- rdk/frameworks/cdk/cdk/core/custom_policy.py | 55 ++++++++ rdk/frameworks/cdk/cdk/core/errors.py | 6 - rdk/frameworks/cdk/cdk/core/managed_rule.py | 54 ++++++++ .../cdk/cdk/core/rule_parameters.py | 7 +- rdk/runners/base.py | 7 +- rdk/runners/cdk.py | 4 +- rdk/runners/cfn_guard.py | 58 +++++++++ 12 files changed, 348 insertions(+), 50 deletions(-) create mode 100644 rdk/cli/commands/test.py create mode 100644 rdk/core/rules_test.py create mode 100644 rdk/frameworks/cdk/cdk/core/custom_policy.py create mode 100644 rdk/frameworks/cdk/cdk/core/managed_rule.py create mode 100644 rdk/runners/cfn_guard.py diff --git a/Makefile b/Makefile index abd26e24..e8802661 100644 --- a/Makefile +++ b/Makefile @@ -25,7 +25,7 @@ SHELL := /usr/bin/env bash # This is intended to run as early as possible to ensure that various things # that is Makefile depends on is available. -override prereq_binaries := git python3 pipenv +override prereq_binaries := git python3 pipenv cfn-guard $(foreach bin,$(prereq_binaries),\ $(if $(shell command -v $(bin) 2>/dev/null),,\ $(error '$(bin)' is not installed or available in PATH)\ diff --git a/rdk/cli/commands/test.py b/rdk/cli/commands/test.py new file mode 100644 index 00000000..95a6a11b --- /dev/null +++ b/rdk/cli/commands/test.py @@ -0,0 +1,17 @@ +import sys +from typing import Any, Callable, Dict, List, Optional + + +from rdk.utils.logger import get_main_logger +from rdk.core.rules_test import RulesTest + + +def run(rulenames: List[str], verbose = False): + """ + test sub-command handler. + """ + + logger = get_main_logger() + logger.info("RDK is starting ...") + + sys.exit(RulesTest(rulenames=rulenames, verbose = verbose).run()) diff --git a/rdk/cli/main.py b/rdk/cli/main.py index 66910efb..4ede5695 100644 --- a/rdk/cli/main.py +++ b/rdk/cli/main.py @@ -6,6 +6,7 @@ import rdk as this_pkg import rdk.cli.commands.init as init_cmd import rdk.cli.commands.deploy as deploy_cmd +import rdk.cli.commands.test as test_cmd import rdk.utils.logger as rdk_logger @@ -54,13 +55,13 @@ def main(): help=f"Use {this_pkg.NAME} --help for detailed usage", ) - # deploy + # init commands_parser.add_parser( "init", help="Sets up AWS Config. This will enable configuration recording in AWS and ensure necessary S3 buckets and IAM Roles are created.", ) - # # test + # deploy commands_parser_deploy = commands_parser.add_parser( "deploy", help="deploy AWS Config Rules", @@ -82,6 +83,28 @@ def main(): help="Dry run mode", ) + # test + commands_parser_test = commands_parser.add_parser( + "test", + help="deploy AWS Config Rules", + ) + + commands_parser_test.add_argument( + "rulename", + metavar="", + nargs="*", + default="", + help="Rule name(s) to test. Unit test of the rule(s) will be executed." 
+ ) + + commands_parser_test.add_argument( + "-v", + "--verbose", + action="store_true", + default=False, + help="Verbose mode", + ) + # _pytest -- hidden command used by pytests commands_parser.add_parser( "_pytest", @@ -112,4 +135,11 @@ def main(): deploy_cmd.run( rulenames=args.rulename, dryrun=args.dryrun, + ) + + # handle: deploy + if args.command == "test": + test_cmd.run( + rulenames=args.rulename, + verbose=args.verbose, ) \ No newline at end of file diff --git a/rdk/core/rules_test.py b/rdk/core/rules_test.py new file mode 100644 index 00000000..aacf1edc --- /dev/null +++ b/rdk/core/rules_test.py @@ -0,0 +1,118 @@ +import logging +import time +from dataclasses import dataclass, field +from pathlib import Path +from io import StringIO +from typing import List, Dict, Any +import json + +import rdk.utils.logger as rdk_logger +from rdk.runners.cfn_guard import CfnGuardRunner +import unittest +from rdk.frameworks.cdk.cdk.core.rule_parameters import get_rule_name, get_deploy_rules_list, get_rule_parameters + +@dataclass +class RulesTest: + """ + Defines rules for unit test. + + Parameters: + + * **`rulenames`** (_str_): list of rule names to deploy + + """ + + rulenames: List[str] + verbose: bool = False + + logger: logging.Logger = field(init=False) + + def __post_init__(self): + self.logger = rdk_logger.get_main_logger() + + def run(self): + self.logger.info("Running local test!") + tests_successful = True + rules_list = [] + test_report = { + "pytest_results": [], + "cfn_guard_results": [] + } + cwd = Path().absolute() + + # Construct our list of rules to test. + if self.rulenames: + rules_list = [cwd.joinpath(rulename) for rulename in self.rulenames] + else: + rules_list = get_deploy_rules_list(rules_dir=cwd) + + for rule_path in rules_list: + rule_name = get_rule_name(rule_path) + rule_parameters = get_rule_parameters(rule_path) + runtime = rule_parameters["Parameters"]["SourceRuntime"] + + self.logger.info("Testing " + rule_name) + test_dir = cwd.joinpath(rule_path) + self.logger.info("Looking for tests in " + test_dir.as_posix()) + + if runtime in ( + "python3.7", + "python3.7-lib", + "python3.8", + "python3.8-lib", + "python3.9", + "python3.9-lib", + ): + test_report["pytest_results"].append(self._run_pytest(test_dir)) + elif runtime == "cloudformation-guard2.0": + test_report["cfn_guard_results"] += self._run_cfn_guard_test(test_dir) + else: + self.logger.info(f"Skipping {rule_name} - The Custom Rule Runtime or Managed Rule are not supported for unit testing.") + + exit(self._result_summary(test_report)) + + def _run_pytest(self, test_dir: Path): + loader = unittest.TestLoader() + suite = loader.discover(test_dir, pattern = "*_test.py") + results = unittest.TextTestRunner(buffer=self.verbose, verbosity=2).run(suite) + if len(results.errors) == 0 and len(results.failures) == 0: + status = "PASSED" + else: + status = "FAILED" + return { "rule_dir": test_dir.name, "status": status,"test_run": results.testsRun, "errors": results.errors, "failures": results.failures } + + def _run_cfn_guard_test(self, test_dir: Path): + report = [] + for test_path in test_dir.glob("**/*"): + if any(filetype in test_path.as_posix() for filetype in ["json", "yaml", "yml"]) and "parameters.json" not in test_path.as_posix(): + cfn_guard_runner = CfnGuardRunner(rules_file=test_dir.joinpath("rule_code.rules"), test_data=test_path, verbose=self.verbose) + try: + results = cfn_guard_runner.test() + report.append({"rule_dir": f"{test_dir.name}/{test_path.name}", "status": "PASSED", "test_run": 
results.count("Test Case #"), "errors": [], "failures": []}) + except Exception as e: + self.logger.info(results) + report.append({"rule_dir": f"{test_dir.name}/{test_path.name}", "status": "FAILED", "test_run": results.count("Test Case #"), "errors": [e], "failures": [results]}) + return report + + def _result_summary(self, test_report: Dict[str, Any]): + pytest_results = test_report["pytest_results"] + cfn_guard_results = test_report["cfn_guard_results"] + self.logger.info(test_report) + exit_code = self._show_result(pytest_results) + exit_code = self._show_result(cfn_guard_results) and exit_code + return exit_code + + def _show_result(self, report_results: Dict[str, Any]): + exit_code = 0 + for result in report_results: + self.logger.info(f"{result['rule_dir']} - status: {result['status']} tests_run:{result['test_run']}") + if result["errors"]: + exit_code = 1 + for error in result["errors"]: + self.logger.info(f" Error found: {error}") + if result["failures"]: + exit_code = 2 + for failure in result["failures"]: + self.logger.info(f" Test failures found: {failure}") + return exit_code + \ No newline at end of file diff --git a/rdk/frameworks/cdk/cdk/cdk_stack.py b/rdk/frameworks/cdk/cdk/cdk_stack.py index 8a6af557..a04429b7 100644 --- a/rdk/frameworks/cdk/cdk/cdk_stack.py +++ b/rdk/frameworks/cdk/cdk/cdk_stack.py @@ -3,9 +3,12 @@ aws_config as config, aws_lambda as lambda_, ) +from dataclasses import asdict from constructs import Construct from pathlib import Path from .core.rule_parameters import get_rule_parameters, get_deploy_rules_list, get_rule_name, rdk_supported_custom_rule_runtime +from .core.custom_policy import CustomPolicy +from .core.managed_rule import ManagedRule from .core.errors import RdkRuleTypesInvalidError, RdkParametersInvalidError import json @@ -22,36 +25,11 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: rule_parameters = get_rule_parameters(rule_path) if rule_parameters["Parameters"]["SourceRuntime"] == "cloudformation-guard2.0": - policy_text = rule_path.joinpath("rule_code.guard").read_text() - - try: - source_events = getattr(config.ResourceType, rule_parameters["Parameters"]["SourceEvents"].upper().replace("AWS::", "").replace("::", "_")) - except: - raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceEvents. Please review https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html") - - config.CustomPolicy(self, rule_name, - policy_text=policy_text, - enable_debug_log=True, - rule_scope=config.RuleScope.from_resources([source_events]) - ) + arg = CustomPolicy(policy_text=rule_path.joinpath("rule_code.rules").read_text(), rule_parameters=rule_parameters) + config.CustomPolicy(self, rule_name, **asdict(arg)) elif rule_parameters["Parameters"]["SourceIdentifier"]: - try: - source_identifier = getattr(config.ManagedRuleIdentifiers, rule_parameters["Parameters"]["SourceIdentifier"].upper().replace("-", "_")) - except: - raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceIdentifier. Please review https://docs.aws.amazon.com/config/latest/developerguide/managed-rules-by-aws-config.html") - - try: - source_events = getattr(config.ResourceType, rule_parameters["Parameters"]["SourceEvents"].upper().replace("AWS::", "").replace("::", "_")) - except: - raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceEvents. 
Please review https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html") - - config.ManagedRule(self, rule_name, - identifier=source_identifier, - input_parameters={ - "max_access_key_age": 60 - }, - rule_scope=config.RuleScope.from_resources([source_events]) - ) + arg = ManagedRule(rule_parameters=rule_parameters) + config.ManagedRule(self, rule_name, **asdict(arg)) # elif rule_parameters["Parameters"]["SourceRuntime"] in rdk_supported_custom_rule_runtime: # # Lambda function containing logic that evaluates compliance with the rule. # eval_compliance_fn = lambda_.Function(self, "CustomFunction", diff --git a/rdk/frameworks/cdk/cdk/core/custom_policy.py b/rdk/frameworks/cdk/cdk/core/custom_policy.py new file mode 100644 index 00000000..65eb21db --- /dev/null +++ b/rdk/frameworks/cdk/cdk/core/custom_policy.py @@ -0,0 +1,55 @@ +from dataclasses import dataclass, field +from typing import Any, Dict, Optional, List +from aws_cdk import ( + aws_config as config +) +from .errors import RdkParametersInvalidError +import json + +@dataclass +class CustomPolicy: + """ + Defines Custom Policy. + + Parameters: + + * **`policy_text`** (_str_): The policy definition containing the logic for your AWS Config Custom Policy rule. + * **`enable_debug_log`** (_bool_): Optional - The boolean expression for enabling debug logging for your AWS Config Custom Policy rule. Default: false + * **`config_rule_name`** (_str_): Optional - A name for the AWS Config rule. Default: - CloudFormation generated name + * **`description`** (_str_): Optional - A description about this AWS Config rule. Default: - No description + * **`input_parameters`** (_Dict[str, Any]_): Optional - Input parameter values that are passed to the AWS Config rule. Default: - No input parameters + * **`maximum_execution_frequency`** (_MaximumExecutionFrequency_): Optional - The maximum frequency at which the AWS Config rule runs evaluations. Default: MaximumExecutionFrequency.TWENTY_FOUR_HOURS + * **`rule_scope`** (_RuleScope_): Optional - Defines which resources trigger an evaluation for an AWS Config rule. Default: - evaluations for the rule are triggered when any resource in the recording group changes. + + """ + + policy_text: str = field(init=False) + enable_debug_log: Optional[bool] = False + config_rule_name: Optional[str] = None + description: Optional[str] = None + input_parameters: Optional[Dict[str, Any]] = None + maximum_execution_frequency: Optional[config.MaximumExecutionFrequency] = config.MaximumExecutionFrequency.TWENTY_FOUR_HOURS + rule_scope: Optional[config.RuleScope] = None + + def __init__(self, policy_text: str, rule_parameters: dict): + param = rule_parameters["Parameters"] + self.policy_text = policy_text + if "EnableDebugLogDelivery" in param: + self.enable_debug_log = True + if "Description" in param: + print(param["Description"]) + self.description = param["Description"] + if "InputParameters" in param: + self.input_parameters = json.loads(param["InputParameters"]) + if "MaximumExecutionFrequency" in param: + try: + maximum_execution_frequency = getattr(config.MaximumExecutionFrequency, param["SourcePeriodic"].upper()) + except: + raise RdkParametersInvalidError("Invalid parameters found in Parameters.MaximumExecutionFrequency. 
Please review https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-config-configrule.html#cfn-config-configrule-maximumexecutionfrequency") + self.maximum_execution_frequency = maximum_execution_frequency + if "SourceEvents" in param: + try: + source_events = getattr(config.ResourceType, param["SourceEvents"].upper().replace("AWS::", "").replace("::", "_")) + except: + raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceEvents. Please review https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html") + self.rule_scope = config.RuleScope.from_resources([source_events]) diff --git a/rdk/frameworks/cdk/cdk/core/errors.py b/rdk/frameworks/cdk/cdk/core/errors.py index 95f14c7d..a64cca29 100644 --- a/rdk/frameworks/cdk/cdk/core/errors.py +++ b/rdk/frameworks/cdk/cdk/core/errors.py @@ -7,12 +7,6 @@ class RdkParametersInvalidError(Exception): """ Raise invalid parameters error when rdk failed to retrieve the parameters from parameters.json """ - def __init__(self, rule_dir): - message = ( - f"Invalid parameters found in {rule_dir}" - ) - - super().__init__(message) class RdkJsonInvalidError(Exception): """ diff --git a/rdk/frameworks/cdk/cdk/core/managed_rule.py b/rdk/frameworks/cdk/cdk/core/managed_rule.py new file mode 100644 index 00000000..96548946 --- /dev/null +++ b/rdk/frameworks/cdk/cdk/core/managed_rule.py @@ -0,0 +1,54 @@ +from dataclasses import dataclass, field +from typing import Any, Dict, Optional, List +from aws_cdk import ( + aws_config as config +) +from .errors import RdkParametersInvalidError +import json + +@dataclass +class ManagedRule: + """ + Defines Managed Rule. + + Parameters: + + * **`identifier`** (_str_): The policy definition containing the logic for your AWS Config Custom Policy rule. + * **`config_rule_name`** (_str_): Optional - A name for the AWS Config rule. Default: - CloudFormation generated name + * **`description`** (_str_): Optional - A description about this AWS Config rule. Default: - No description + * **`input_parameters`** (_Dict[str, Any]_): Optional - Input parameter values that are passed to the AWS Config rule. Default: - No input parameters + * **`maximum_execution_frequency`** (_MaximumExecutionFrequency_): Optional - The maximum frequency at which the AWS Config rule runs evaluations. Default: MaximumExecutionFrequency.TWENTY_FOUR_HOURS + * **`rule_scope`** (_RuleScope_): Optional - Defines which resources trigger an evaluation for an AWS Config rule. Default: - evaluations for the rule are triggered when any resource in the recording group changes. + + """ + + identifier: config.ManagedRuleIdentifiers = field(init=False) + config_rule_name: Optional[str] = None + description: Optional[str] = None + input_parameters: Optional[Dict[str, Any]] = None + maximum_execution_frequency: Optional[config.MaximumExecutionFrequency] = config.MaximumExecutionFrequency.TWENTY_FOUR_HOURS + rule_scope: Optional[config.RuleScope] = None + + def __init__(self, rule_parameters: dict): + param = rule_parameters["Parameters"] + if param["SourceIdentifier"]: + try: + self.identifier = getattr(config.ManagedRuleIdentifiers, param["SourceIdentifier"].upper().replace("-", "_")) + except: + raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceIdentifier. 
Please review https://docs.aws.amazon.com/config/latest/developerguide/managed-rules-by-aws-config.html") + if "Description" in param: + self.description = param["Description"] + if "InputParameters" in param: + self.input_parameters = json.loads(param["InputParameters"]) + if "MaximumExecutionFrequency" in param: + try: + maximum_execution_frequency = getattr(config.MaximumExecutionFrequency, param["SourcePeriodic"].upper()) + except: + raise RdkParametersInvalidError("Invalid parameters found in Parameters.MaximumExecutionFrequency. Please review https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-config-configrule.html#cfn-config-configrule-maximumexecutionfrequency") + self.maximum_execution_frequency = maximum_execution_frequency + if "SourceEvents" in param: + try: + source_events = getattr(config.ResourceType, param["SourceEvents"].upper().replace("AWS::", "").replace("::", "_")) + except: + raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceEvents. Please review https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html") + self.rule_scope = config.RuleScope.from_resources([source_events]) \ No newline at end of file diff --git a/rdk/frameworks/cdk/cdk/core/rule_parameters.py b/rdk/frameworks/cdk/cdk/core/rule_parameters.py index 40ff5c55..a8d9f564 100644 --- a/rdk/frameworks/cdk/cdk/core/rule_parameters.py +++ b/rdk/frameworks/cdk/cdk/core/rule_parameters.py @@ -28,10 +28,11 @@ def get_rule_parameters(rule_dir: Path): return validate(parameters_json) def get_rule_name(rule_path: Path): + rule_parameters = get_rule_parameters(rule_path) try: - rule_name = get_rule_parameters(rule_path)["Parameters"]["RuleName"] + rule_name = rule_parameters["Parameters"]["RuleName"] except Exception as e: - raise RdkParametersInvalidError("Invalid parameters found in Parameters.RuleName") + raise RdkParametersInvalidError(f"Invalid parameters found in Parameters.RuleName in {rule_path}") if len(rule_name) > 128: raise RdkParametersInvalidError("Error: Found Rule with name over 128 characters: {rule_name} \n Recreate the Rule with a shorter name.") @@ -39,7 +40,6 @@ def get_rule_name(rule_path: Path): def get_deploy_rules_list(rules_dir: Path, deployment_mode: str = "all",): deploy_rules_list = [] - print(rules_dir.absolute()) for path in rules_dir.absolute().glob("**/parameters.json"): print(path) if rules_dir.absolute().joinpath("rdk").as_posix() not in path.as_posix(): @@ -56,7 +56,6 @@ def get_deploy_rules_list(rules_dir: Path, deployment_mode: str = "all",): else: raise RdkNotSupportedError('Invalid Option: Specify Rule Name or RuleSet or empty for all.') - print(deploy_rules_list) return deploy_rules_list def validate(parameters_json: dict): diff --git a/rdk/runners/base.py b/rdk/runners/base.py index 2712198c..75836d3b 100644 --- a/rdk/runners/base.py +++ b/rdk/runners/base.py @@ -206,12 +206,7 @@ def get_python_executable(self) -> str: # pylint: disable=no-self-use def _check_if_command_is_allowed(self, cmd: str): if cmd not in [ "cdk", - "ls", - "pwd", - # "custodian", - # "git", - # "pytest", - # "terraform", + "cfn-guard", self.get_python_executable(), ]: raise RdkCommandNotAllowedError(f"Unsupported command provided: {cmd}") diff --git a/rdk/runners/cdk.py b/rdk/runners/cdk.py index 7210dc23..26270622 100644 --- a/rdk/runners/cdk.py +++ b/rdk/runners/cdk.py @@ -18,8 +18,8 @@ class CdkRunner(BaseRunner): Parameters: - * **`root_module`** (_Path_): Path to the cdk root module - * **`config`** (_Config_): 
`rdk.core.config.Config` object + * **`rules_dir`** (_Path_): Path to the rules directory for deployment + * **`cdk_app_dir`** (_Path_): Path to the embedded CDK framework root directory """ diff --git a/rdk/runners/cfn_guard.py b/rdk/runners/cfn_guard.py new file mode 100644 index 00000000..a43f9068 --- /dev/null +++ b/rdk/runners/cfn_guard.py @@ -0,0 +1,58 @@ +import copy +import json +import os +from dataclasses import dataclass, field +from pathlib import Path +import shutil +from typing import Any, Dict, List, Optional + +import rdk as this_pkg +from rdk.runners.base import BaseRunner + + +@dataclass +class CfnGuardRunner(BaseRunner): + """ + Helper class to run cfn-guard commands. + https://docs.aws.amazon.com/cfn-guard/latest/ug/testing-rules.html + + Parameters: + + * **`rules_file`** (_Path_): Provides the name of a rules file. + * **`config`** (_Config_): Provides the name of a file or directory for data files in either JSON or YAML format. + + """ + + rules_file: Path + test_data: Path + verbose: bool = False + + def __post_init__(self): + super().__post_init__() + + def test(self): + """ + Executes `cfn-guard test`. + + Parameters: + """ + cmd = [ + "cfn-guard", + "test", + "--rules-file", + self.rules_file.as_posix(), + "--test-data", + self.test_data.as_posix() + ] + + if self.verbose: + cmd.append("--verbose") + + self.logger.info(f"Running cfn-guard unit test on {self.rules_file.relative_to(self.rules_file.parent.parent)} with testing data: {self.test_data.relative_to(self.rules_file.parent.parent)}") + + return self.run_cmd( + cmd=cmd, + cwd=Path().absolute().as_posix(), + capture_output=True + ) + From 40bced95337cdce1a223f6b0454bb2d466b44bc7 Mon Sep 17 00:00:00 2001 From: Ricky Chau Date: Fri, 14 Apr 2023 19:14:43 -0400 Subject: [PATCH 06/23] feat(remediation): Add support for remediation configuration --- rdk/cli/commands/deploy.py | 4 +- rdk/cli/commands/init.py | 1 - rdk/cli/commands/test.py | 4 +- rdk/cli/main.py | 3 +- rdk/core/rules_deploy.py | 4 +- rdk/core/rules_test.py | 44 ++++++++---- rdk/frameworks/cdk/app.py | 2 - rdk/frameworks/cdk/cdk/cdk_stack.py | 50 +++++++++----- .../cdk/core/{ => config}/custom_policy.py | 13 ++-- .../cdk/cdk/core/{ => config}/managed_rule.py | 29 +++++--- .../core/config/remediation_configuration.py | 67 +++++++++++++++++++ rdk/frameworks/cdk/cdk/core/errors.py | 13 ++-- .../cdk/cdk/core/rule_parameters.py | 28 ++++++-- .../cdk/tests/unit/test_cdk_stack.py | 2 +- rdk/runners/base.py | 2 +- rdk/runners/cdk.py | 7 +- rdk/runners/cfn_guard.py | 7 +- rdk/utils/logger.py | 1 - tests/unit/runners/test_base.py | 1 - tests/unit/utils/test_logger.py | 1 - 20 files changed, 202 insertions(+), 81 deletions(-) rename rdk/frameworks/cdk/cdk/core/{ => config}/custom_policy.py (95%) rename rdk/frameworks/cdk/cdk/core/{ => config}/managed_rule.py (74%) create mode 100644 rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py diff --git a/rdk/cli/commands/deploy.py b/rdk/cli/commands/deploy.py index 38cb16f4..4ab2b72a 100644 --- a/rdk/cli/commands/deploy.py +++ b/rdk/cli/commands/deploy.py @@ -1,16 +1,14 @@ import sys from typing import Any, Callable, Dict, List, Optional - -from rdk.utils.logger import get_main_logger from rdk.core.rules_deploy import RulesDeploy +from rdk.utils.logger import get_main_logger def run(rulenames: List[str], dryrun: bool): """ test sub-command handler. 
""" - logger = get_main_logger() logger.info("RDK is starting ...") diff --git a/rdk/cli/commands/init.py b/rdk/cli/commands/init.py index 8016074b..bb48ed82 100644 --- a/rdk/cli/commands/init.py +++ b/rdk/cli/commands/init.py @@ -8,7 +8,6 @@ def run(): """ init sub-command handler. """ - logger = get_main_logger() logger.info("AWS Config initializing is starting ...") diff --git a/rdk/cli/commands/test.py b/rdk/cli/commands/test.py index 95a6a11b..dfd3414c 100644 --- a/rdk/cli/commands/test.py +++ b/rdk/cli/commands/test.py @@ -1,16 +1,14 @@ import sys from typing import Any, Callable, Dict, List, Optional - -from rdk.utils.logger import get_main_logger from rdk.core.rules_test import RulesTest +from rdk.utils.logger import get_main_logger def run(rulenames: List[str], verbose = False): """ test sub-command handler. """ - logger = get_main_logger() logger.info("RDK is starting ...") diff --git a/rdk/cli/main.py b/rdk/cli/main.py index 4ede5695..a6c7aaf1 100644 --- a/rdk/cli/main.py +++ b/rdk/cli/main.py @@ -4,8 +4,8 @@ from pathlib import Path import rdk as this_pkg -import rdk.cli.commands.init as init_cmd import rdk.cli.commands.deploy as deploy_cmd +import rdk.cli.commands.init as init_cmd import rdk.cli.commands.test as test_cmd import rdk.utils.logger as rdk_logger @@ -14,7 +14,6 @@ def main(): """ Main CLI handler. """ - # Main parser main_parser = argparse.ArgumentParser( prog=this_pkg.CLI_NAME, diff --git a/rdk/core/rules_deploy.py b/rdk/core/rules_deploy.py index 7508e6a8..79eac1ad 100644 --- a/rdk/core/rules_deploy.py +++ b/rdk/core/rules_deploy.py @@ -7,6 +7,7 @@ import rdk.utils.logger as rdk_logger from rdk.runners.cdk import CdkRunner + @dataclass class RulesDeploy: """ @@ -28,9 +29,8 @@ def __post_init__(self): def run(self): """ - Runs Rules Deployment + Runs Rules Deployment. 
""" - if len(self.rulenames) > 0: rules_dir = Path(self.rulenames[0]) else: diff --git a/rdk/core/rules_test.py b/rdk/core/rules_test.py index aacf1edc..0b4dad8d 100644 --- a/rdk/core/rules_test.py +++ b/rdk/core/rules_test.py @@ -1,15 +1,20 @@ +import json import logging import time +import unittest from dataclasses import dataclass, field -from pathlib import Path from io import StringIO -from typing import List, Dict, Any -import json +from pathlib import Path +from typing import Any, Dict, List import rdk.utils.logger as rdk_logger +from rdk.frameworks.cdk.cdk.core.rule_parameters import ( + get_deploy_rules_list, + get_rule_name, + get_rule_parameters, +) from rdk.runners.cfn_guard import CfnGuardRunner -import unittest -from rdk.frameworks.cdk.cdk.core.rule_parameters import get_rule_name, get_deploy_rules_list, get_rule_parameters + @dataclass class RulesTest: @@ -64,7 +69,7 @@ def run(self): "python3.9-lib", ): test_report["pytest_results"].append(self._run_pytest(test_dir)) - elif runtime == "cloudformation-guard2.0": + elif runtime in ["cloudformation-guard2.0", "guard-2.x.x"]: test_report["cfn_guard_results"] += self._run_cfn_guard_test(test_dir) else: self.logger.info(f"Skipping {rule_name} - The Custom Rule Runtime or Managed Rule are not supported for unit testing.") @@ -83,36 +88,47 @@ def _run_pytest(self, test_dir: Path): def _run_cfn_guard_test(self, test_dir: Path): report = [] + results = "" for test_path in test_dir.glob("**/*"): - if any(filetype in test_path.as_posix() for filetype in ["json", "yaml", "yml"]) and "parameters.json" not in test_path.as_posix(): - cfn_guard_runner = CfnGuardRunner(rules_file=test_dir.joinpath("rule_code.rules"), test_data=test_path, verbose=self.verbose) + self.logger.info(f"Running test {test_path}") + self.logger.info(f"Test file: {test_path.suffix} {test_path.name}") + if test_path.suffix in [".json", ".yaml", ".yml"] and test_path.name != "parameters.json": + cfn_guard_runner = CfnGuardRunner(rules_file=test_dir.joinpath("rule_code.guard"), test_data=test_path, verbose=self.verbose) try: results = cfn_guard_runner.test() report.append({"rule_dir": f"{test_dir.name}/{test_path.name}", "status": "PASSED", "test_run": results.count("Test Case #"), "errors": [], "failures": []}) except Exception as e: - self.logger.info(results) report.append({"rule_dir": f"{test_dir.name}/{test_path.name}", "status": "FAILED", "test_run": results.count("Test Case #"), "errors": [e], "failures": [results]}) return report def _result_summary(self, test_report: Dict[str, Any]): pytest_results = test_report["pytest_results"] cfn_guard_results = test_report["cfn_guard_results"] - self.logger.info(test_report) + + self.logger.info("") + self.logger.info("Test Summary:") + self.logger.info("===============") + self.logger.info("Pytest Results") + self.logger.info("===============") exit_code = self._show_result(pytest_results) + self.logger.info("================") + self.logger.info("CfnGuard Results") + self.logger.info("================") exit_code = self._show_result(cfn_guard_results) and exit_code return exit_code def _show_result(self, report_results: Dict[str, Any]): exit_code = 0 for result in report_results: - self.logger.info(f"{result['rule_dir']} - status: {result['status']} tests_run:{result['test_run']}") + self.logger.info(f"{result['rule_dir']} - ") + self.logger.info(f"\tStatus: {result['status']} tests_run:{result['test_run']}") if result["errors"]: exit_code = 1 for error in result["errors"]: - self.logger.info(f" Error found: {error}") + 
self.logger.info(f"\tError found: {error}") if result["failures"]: exit_code = 2 for failure in result["failures"]: - self.logger.info(f" Test failures found: {failure}") + if failure != "": + self.logger.info(f"\tTest failures found: {failure}") return exit_code - \ No newline at end of file diff --git a/rdk/frameworks/cdk/app.py b/rdk/frameworks/cdk/app.py index 32f971fa..441d0761 100644 --- a/rdk/frameworks/cdk/app.py +++ b/rdk/frameworks/cdk/app.py @@ -2,10 +2,8 @@ import os import aws_cdk as cdk - from cdk.cdk_stack import CdkStack - app = cdk.App() CdkStack(app, "CdkStack", # If you don't specify 'env', this stack will be environment-agnostic. diff --git a/rdk/frameworks/cdk/cdk/cdk_stack.py b/rdk/frameworks/cdk/cdk/cdk_stack.py index a04429b7..fd668711 100644 --- a/rdk/frameworks/cdk/cdk/cdk_stack.py +++ b/rdk/frameworks/cdk/cdk/cdk_stack.py @@ -1,16 +1,24 @@ -from aws_cdk import ( - Stack, - aws_config as config, - aws_lambda as lambda_, -) +import json from dataclasses import asdict -from constructs import Construct from pathlib import Path -from .core.rule_parameters import get_rule_parameters, get_deploy_rules_list, get_rule_name, rdk_supported_custom_rule_runtime -from .core.custom_policy import CustomPolicy -from .core.managed_rule import ManagedRule -from .core.errors import RdkRuleTypesInvalidError, RdkParametersInvalidError -import json + +from aws_cdk import Stack +from aws_cdk import aws_config as config +from aws_cdk import aws_lambda as lambda_ +from constructs import Construct + +from .core.config.custom_policy import CustomPolicy +from .core.config.managed_rule import ManagedRule +from .core.config.remediation_configuration import RemediationConfiguration + +from .core.errors import RdkParametersInvalidError, RdkRuleTypesInvalidError +from .core.rule_parameters import ( + get_deploy_rules_list, + get_rule_name, + get_rule_parameters, + rdk_supported_custom_rule_runtime, +) + class CdkStack(Stack): @@ -23,13 +31,14 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: for rule_path in rules_list: rule_name = get_rule_name(rule_path) rule_parameters = get_rule_parameters(rule_path) + generated_rule_name = "" - if rule_parameters["Parameters"]["SourceRuntime"] == "cloudformation-guard2.0": - arg = CustomPolicy(policy_text=rule_path.joinpath("rule_code.rules").read_text(), rule_parameters=rule_parameters) - config.CustomPolicy(self, rule_name, **asdict(arg)) - elif rule_parameters["Parameters"]["SourceIdentifier"]: + if "SourceRuntime" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["SourceRuntime"] in ["cloudformation-guard2.0", "guard-2.x.x"]: + arg = CustomPolicy(policy_text=rule_path.joinpath("rule_code.guard").read_text(), rule_parameters=rule_parameters) + generated_rule_name = config.CustomPolicy(self, rule_name, **asdict(arg)).config_rule_name + elif "SourceIdentifier" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["SourceIdentifier"]: arg = ManagedRule(rule_parameters=rule_parameters) - config.ManagedRule(self, rule_name, **asdict(arg)) + generated_rule_name = config.ManagedRule(self, rule_name, **asdict(arg)).config_rule_name # elif rule_parameters["Parameters"]["SourceRuntime"] in rdk_supported_custom_rule_runtime: # # Lambda function containing logic that evaluates compliance with the rule. 
# eval_compliance_fn = lambda_.Function(self, "CustomFunction", @@ -45,8 +54,13 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: # rule_scope=config.RuleScope.from_resource(config.ResourceType.EC2_INSTANCE) # ) else: - raise RdkRuleTypesInvalidError(f"Error loading parameters file for Rule {rule_name}") - + print(f"Rule type not supported for Rule {rule_name}") + continue + # raise RdkRuleTypesInvalidError(f"Error loading parameters file for Rule {rule_name}") + + if "Remediation" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["Remediation"]: + arg = RemediationConfiguration(rule_parameters=rule_parameters) + config.CfnRemediationConfiguration(self, "MyCfnRemediationConfiguration", config_rule_name=generated_rule_name, **asdict(arg)) # # A rule to detect stack drifts # drift_rule = config.CloudFormationStackDriftDetectionCheck(self, "Drift") diff --git a/rdk/frameworks/cdk/cdk/core/custom_policy.py b/rdk/frameworks/cdk/cdk/core/config/custom_policy.py similarity index 95% rename from rdk/frameworks/cdk/cdk/core/custom_policy.py rename to rdk/frameworks/cdk/cdk/core/config/custom_policy.py index 65eb21db..5768840b 100644 --- a/rdk/frameworks/cdk/cdk/core/custom_policy.py +++ b/rdk/frameworks/cdk/cdk/core/config/custom_policy.py @@ -1,10 +1,11 @@ -from dataclasses import dataclass, field -from typing import Any, Dict, Optional, List -from aws_cdk import ( - aws_config as config -) -from .errors import RdkParametersInvalidError import json +from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional + +from aws_cdk import aws_config as config + +from ..errors import RdkParametersInvalidError + @dataclass class CustomPolicy: diff --git a/rdk/frameworks/cdk/cdk/core/managed_rule.py b/rdk/frameworks/cdk/cdk/core/config/managed_rule.py similarity index 74% rename from rdk/frameworks/cdk/cdk/core/managed_rule.py rename to rdk/frameworks/cdk/cdk/core/config/managed_rule.py index 96548946..9f604329 100644 --- a/rdk/frameworks/cdk/cdk/core/managed_rule.py +++ b/rdk/frameworks/cdk/cdk/core/config/managed_rule.py @@ -1,10 +1,11 @@ -from dataclasses import dataclass, field -from typing import Any, Dict, Optional, List -from aws_cdk import ( - aws_config as config -) -from .errors import RdkParametersInvalidError import json +from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional + +from aws_cdk import aws_config as config + +from ..errors import RdkParametersInvalidError + @dataclass class ManagedRule: @@ -32,10 +33,22 @@ class ManagedRule: def __init__(self, rule_parameters: dict): param = rule_parameters["Parameters"] if param["SourceIdentifier"]: + identifier = param["SourceIdentifier"].upper().replace("-", "_") try: - self.identifier = getattr(config.ManagedRuleIdentifiers, param["SourceIdentifier"].upper().replace("-", "_")) + self.identifier = getattr(config.ManagedRuleIdentifiers, identifier) except: - raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceIdentifier. 
Please review https://docs.aws.amazon.com/config/latest/developerguide/managed-rules-by-aws-config.html") + if identifier in [ + # exception list for unmatching identifiers https://docs.aws.amazon.com/cdk/api/v2/python/aws_cdk.aws_config/ManagedRuleIdentifiers.html + "MULTI_REGION_CLOUD_TRAIL_ENABLED", + "ENCRYPTED_VOLUMES", + "DESIRED_INSTANCE_TENANCY", + "DESIRED_INSTANCE_TYPE", + "INSTANCES_IN_VPC", + "INCOMING_SSH_DISABLED" + ]: + self.identifier = identifier + else: + raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceIdentifier. Please review https://docs.aws.amazon.com/config/latest/developerguide/managed-rules-by-aws-config.html") if "Description" in param: self.description = param["Description"] if "InputParameters" in param: diff --git a/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py b/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py new file mode 100644 index 00000000..3a814325 --- /dev/null +++ b/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py @@ -0,0 +1,67 @@ +import json +from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional, Union + +from aws_cdk import aws_config as config +from aws_cdk import IResolvable + +from ..errors import RdkParametersInvalidError + + +@dataclass +class RemediationConfiguration: + """ + Defines Remediation Configuration. + + Parameters: + + * **`target_id `** (_str_): Target ID is the name of the SSM document. + * **`target_type `** (_str_): The type of the target. Target executes remediation. For example, SSM document. + * **`automatic`** (_Union[bool, IResolvable, None]_) : Optional - The remediation is triggered automatically. + * **`execution_controls`** (_Union[IResolvable, ExecutionControlsProperty, Dict[str, Any], None]_) : Optional - An ExecutionControls object. + * **`maximum_automatic_attempts`** (_Union[int, float, None]_) : Optional - The maximum number of failed attempts for auto-remediation. If you do not select a number, the default is 5. For example, if you specify MaximumAutomaticAttempts as 5 with RetryAttemptSeconds as 50 seconds, AWS Config will put a RemediationException on your behalf for the failing resource after the 5th failed attempt within 50 seconds. + * **`parameters`** (_Optional[Any]_) : Optional - An object of the RemediationParameterValue. For more information, see RemediationParameterValue . .. epigraph:: The type is a map of strings to RemediationParameterValue. + * **`resource_type`** (_Optional[str]_) : Optional - The type of a resource. + * **`retry_attempt_seconds`** (_Union[int, float, None]_) : Optional - Maximum time in seconds that AWS Config runs auto-remediation. If you do not select a number, the default is 60 seconds. For example, if you specify RetryAttemptSeconds as 50 seconds and MaximumAutomaticAttempts as 5, AWS Config will run auto-remediations 5 times within 50 seconds before throwing an exception. + * **`target_version`** (_Optional[str]_) : Optional - Version of the target. For example, version of the SSM document. .. epigraph:: If you make backward incompatible changes to the SSM document, you must call PutRemediationConfiguration API again to ensure the remediations can run. 
+ + + """ + target_id: str = field(init=False) + target_type: str = field(init=False) + automatic: Optional[Union[bool, IResolvable, None]] = None + execution_controls: Optional[Union[IResolvable, config.CfnRemediationConfiguration.ExecutionControlsProperty, Dict[str, Any], None]] = None + maximum_automatic_attempts: Optional[Union[int, float, None]] = None + parameters: Optional[Any] = None + resource_type: Optional[str] = None + retry_attempt_seconds: Union[int, float, None] = None + target_version: Optional[str] = None + + + def __init__(self, rule_parameters: dict): + param = rule_parameters["Parameters"]['Remediation'] + self.target_id = param["TargetId"] + self.target_type = param["TargetType"] + if "Automatic" in param: + self.automatic = param["Automatic"] + if "ExecutionControls" in param: + if "SsmControls" in param["ExecutionControls"]: + if "ConcurrentExecutionRatePercentage" in param["ExecutionControls"]["SsmControls"]: + concurrent_execution_rate_percentage = param["ExecutionControls"]["SsmControls"]["ConcurrentExecutionRatePercentage"] + if "ErrorPercentage" in param["ExecutionControls"]["SsmControls"]: + error_percentage = param["ExecutionControls"]["SsmControls"]["ErrorPercentage"] + ssm_controls = config.CfnRemediationConfiguration.SsmControlsProperty( + concurrent_execution_rate_percentage=concurrent_execution_rate_percentage, + error_percentage=error_percentage + ) + self.execution_controls = config.CfnRemediationConfiguration.ExecutionControlsProperty(ssm_controls) + if "MaximumAutomaticAttempts" in param: + self.maximum_automatic_attempts = int(param["MaximumAutomaticAttempts"]) + if "Parameters" in param: + self.parameters = param["Parameters"] + if "ResourceType" in param: + self.resource_type = param["ResourceType"] + if "RetryAttemptSeconds" in param: + self.retry_attempt_seconds = int(param["RetryAttemptSeconds"]) + if "TargetVersion" in param: + self.target_version = param["TargetVersion"] diff --git a/rdk/frameworks/cdk/cdk/core/errors.py b/rdk/frameworks/cdk/cdk/core/errors.py index a64cca29..7d8f1b30 100644 --- a/rdk/frameworks/cdk/cdk/core/errors.py +++ b/rdk/frameworks/cdk/cdk/core/errors.py @@ -5,12 +5,13 @@ class RdkParametersInvalidError(Exception): """ - Raise invalid parameters error when rdk failed to retrieve the parameters from parameters.json + Raise invalid parameters error when rdk failed to retrieve the parameters + from parameters.json. """ class RdkJsonInvalidError(Exception): """ - Raise invalid json error when rdk failed to decode parameters.json + Raise invalid json error when rdk failed to decode parameters.json. """ def __init__(self, rule_dir): message = ( @@ -21,7 +22,7 @@ def __init__(self, rule_dir): class RdkJsonLoadFailure(Exception): """ - Raise load failure exception when rdk failed to load parameters.json + Raise load failure exception when rdk failed to load parameters.json. """ def __init__(self, rule_dir): message = ( @@ -32,17 +33,17 @@ def __init__(self, rule_dir): class RdkRuleTypesInvalidError(Exception): """ - Raise invalid source type error for non supporting types. + Raise invalid source type error for non supporting types. """ class RdkNotSupportedError(Exception): """ - Raise not supporting error for not supported action. + Raise not supporting error for not supported action. """ class RdkDuplicatedRuleNameError(Exception): """ - Raise invalid source type error for non supporting types. + Raise invalid source type error for non supporting types. 
""" def __init__(self, rule_paths): message = ( diff --git a/rdk/frameworks/cdk/cdk/core/rule_parameters.py b/rdk/frameworks/cdk/cdk/core/rule_parameters.py index a8d9f564..13c15208 100644 --- a/rdk/frameworks/cdk/cdk/core/rule_parameters.py +++ b/rdk/frameworks/cdk/cdk/core/rule_parameters.py @@ -1,7 +1,15 @@ -from aws_cdk import Stack -from pathlib import Path -from .errors import RdkJsonInvalidError, RdkJsonLoadFailure, RdkDuplicatedRuleNameError, RdkParametersInvalidError, RdkNotSupportedError import json +from pathlib import Path + +from aws_cdk import Stack + +from .errors import ( + RdkDuplicatedRuleNameError, + RdkJsonInvalidError, + RdkJsonLoadFailure, + RdkNotSupportedError, + RdkParametersInvalidError, +) rdk_supported_custom_rule_runtime = [ "python3.7", @@ -25,7 +33,7 @@ def get_rule_parameters(rule_dir: Path): except Exception as e: raise RdkJsonLoadFailure(rule_dir) - return validate(parameters_json) + return validate(rule_dir, parameters_json) def get_rule_name(rule_path: Path): rule_parameters = get_rule_parameters(rule_path) @@ -41,7 +49,6 @@ def get_rule_name(rule_path: Path): def get_deploy_rules_list(rules_dir: Path, deployment_mode: str = "all",): deploy_rules_list = [] for path in rules_dir.absolute().glob("**/parameters.json"): - print(path) if rules_dir.absolute().joinpath("rdk").as_posix() not in path.as_posix(): if deployment_mode == "all": deploy_rules_list.append(path.parent) @@ -58,6 +65,15 @@ def get_deploy_rules_list(rules_dir: Path, deployment_mode: str = "all",): return deploy_rules_list -def validate(parameters_json: dict): +def validate(rule_dir: Path, parameters_json: dict): #TODO + latest_schema_version = "1.0" + if "Parameters" not in parameters_json: + raise RdkParametersInvalidError(f"Error in {rule_dir}: Missing Parameters Key") + if "Version" not in parameters_json and parameters_json["Version"] != latest_schema_version: + raise RdkParametersInvalidError(f"Error in {rule_dir}: Missing Version Key. The latest supported schema version is {latest_schema_version}") + if "SourceIdentifier" not in parameters_json["Parameters"] and "SourceRuntime" not in parameters_json["Parameters"]: + raise RdkParametersInvalidError(f"Error in {rule_dir}: Missing Parameters.SourceIdentifier or Parameters.SourceRuntime is required") + if "SourcePeriodic" not in parameters_json["Parameters"] and "SourceEvents" not in parameters_json["Parameters"]: + raise RdkParametersInvalidError(f"Error in {rule_dir}: Missing Parameters.SourcePeriodic or Parameters.SourceEvents is required") return parameters_json \ No newline at end of file diff --git a/rdk/frameworks/cdk/tests/unit/test_cdk_stack.py b/rdk/frameworks/cdk/tests/unit/test_cdk_stack.py index b266920e..b5ad1c04 100644 --- a/rdk/frameworks/cdk/tests/unit/test_cdk_stack.py +++ b/rdk/frameworks/cdk/tests/unit/test_cdk_stack.py @@ -1,8 +1,8 @@ import aws_cdk as core import aws_cdk.assertions as assertions - from cdk.cdk_stack import CdkStack + # example tests. 
To run these tests, uncomment this file along with the example # resource in cdk/cdk_stack.py def test_sqs_queue_created(): diff --git a/rdk/runners/base.py b/rdk/runners/base.py index 75836d3b..857a4a5f 100644 --- a/rdk/runners/base.py +++ b/rdk/runners/base.py @@ -22,11 +22,11 @@ RdkCommandInvokeError, RdkCommandNotAllowedError, ) + # from rdk.utils.logger import get_testcase_logger from rdk.utils.logger import get_main_logger - @dataclass class BaseRunner: """ diff --git a/rdk/runners/cdk.py b/rdk/runners/cdk.py index 26270622..2e223560 100644 --- a/rdk/runners/cdk.py +++ b/rdk/runners/cdk.py @@ -1,9 +1,9 @@ import copy import json import os +import shutil from dataclasses import dataclass, field from pathlib import Path -import shutil from typing import Any, Dict, List, Optional import rdk as this_pkg @@ -14,7 +14,7 @@ class CdkRunner(BaseRunner): """ Helper class to run cdk commands. - https://docs.aws.amazon.com/cdk/v2/guide/hello_world.html + https://docs.aws.amazon.com/cdk/v2/guide/hello_world.html. Parameters: @@ -41,6 +41,7 @@ def synthesize(self): Executes `cdk synth`. Parameters: + """ cmd = [ "cdk", @@ -63,6 +64,7 @@ def bootstrap(self): Executes `cdk bootstrap`. Parameters: + """ cmd = [ "cdk", @@ -85,6 +87,7 @@ def deploy(self): Executes `cdk deploy`. Parameters: + """ cmd = [ "cdk", diff --git a/rdk/runners/cfn_guard.py b/rdk/runners/cfn_guard.py index a43f9068..60db0757 100644 --- a/rdk/runners/cfn_guard.py +++ b/rdk/runners/cfn_guard.py @@ -1,9 +1,9 @@ import copy import json import os +import shutil from dataclasses import dataclass, field from pathlib import Path -import shutil from typing import Any, Dict, List, Optional import rdk as this_pkg @@ -13,8 +13,8 @@ @dataclass class CfnGuardRunner(BaseRunner): """ - Helper class to run cfn-guard commands. - https://docs.aws.amazon.com/cfn-guard/latest/ug/testing-rules.html + Helper class to run cfn-guard commands. https://docs.aws.amazon.com/cfn- + guard/latest/ug/testing-rules.html. Parameters: @@ -35,6 +35,7 @@ def test(self): Executes `cfn-guard test`. Parameters: + """ cmd = [ "cfn-guard", diff --git a/rdk/utils/logger.py b/rdk/utils/logger.py index 141448b2..32603196 100644 --- a/rdk/utils/logger.py +++ b/rdk/utils/logger.py @@ -113,7 +113,6 @@ def add_file_handler(logger: logging.Logger, logfile_dir: Path): """ Add a file handler to an existing logger once the location is known. 
""" - friendly_name = logger.name.split(".")[-1] logfile_dir.mkdir(parents=True, exist_ok=True) diff --git a/tests/unit/runners/test_base.py b/tests/unit/runners/test_base.py index feb584b2..ffa51d38 100644 --- a/tests/unit/runners/test_base.py +++ b/tests/unit/runners/test_base.py @@ -33,7 +33,6 @@ def test_get_python_executable(monkeypatch: pytest.MonkeyPatch): def test_run_cmd_basic(mocker: MockerFixture): - # Init runner = BaseRunner() subprocess_popen_mock = mocker.patch("subprocess.Popen") diff --git a/tests/unit/utils/test_logger.py b/tests/unit/utils/test_logger.py index 0edf9c83..b40a7199 100644 --- a/tests/unit/utils/test_logger.py +++ b/tests/unit/utils/test_logger.py @@ -123,7 +123,6 @@ def test_update_stream_handler_level(): def test_get_testcase_logger(monkeypatch: pytest.MonkeyPatch): - with monkeypatch.context() as m: logger = rdk_logger.get_testcase_logger() assert "unknown" in logger.name From da1144a850604f06a0799bd4ffc8c297aa9f36dd Mon Sep 17 00:00:00 2001 From: Ricky Chau Date: Tue, 18 Apr 2023 13:38:47 -0400 Subject: [PATCH 07/23] feat(remedi): add remediation configuration --- rdk/frameworks/cdk/cdk/cdk_stack.py | 13 ++++++------- rdk/frameworks/cdk/cdk/core/config/custom_policy.py | 6 ++++-- rdk/frameworks/cdk/cdk/core/config/managed_rule.py | 8 +++++--- .../cdk/core/config/remediation_configuration.py | 4 ++++ 4 files changed, 19 insertions(+), 12 deletions(-) diff --git a/rdk/frameworks/cdk/cdk/cdk_stack.py b/rdk/frameworks/cdk/cdk/cdk_stack.py index fd668711..d74a8545 100644 --- a/rdk/frameworks/cdk/cdk/cdk_stack.py +++ b/rdk/frameworks/cdk/cdk/cdk_stack.py @@ -31,14 +31,13 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: for rule_path in rules_list: rule_name = get_rule_name(rule_path) rule_parameters = get_rule_parameters(rule_path) - generated_rule_name = "" if "SourceRuntime" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["SourceRuntime"] in ["cloudformation-guard2.0", "guard-2.x.x"]: - arg = CustomPolicy(policy_text=rule_path.joinpath("rule_code.guard").read_text(), rule_parameters=rule_parameters) - generated_rule_name = config.CustomPolicy(self, rule_name, **asdict(arg)).config_rule_name + arg = CustomPolicy(policy_text=rule_path.joinpath("rule_code.guard").read_text(), rule_parameters=rule_parameters, config_rule_name = rule_name) + config.CustomPolicy(self, rule_name, **asdict(arg)).config_rule_name elif "SourceIdentifier" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["SourceIdentifier"]: - arg = ManagedRule(rule_parameters=rule_parameters) - generated_rule_name = config.ManagedRule(self, rule_name, **asdict(arg)).config_rule_name + arg = ManagedRule(rule_parameters=rule_parameters, config_rule_name = rule_name) + config.ManagedRule(self, rule_name, **asdict(arg)).config_rule_name # elif rule_parameters["Parameters"]["SourceRuntime"] in rdk_supported_custom_rule_runtime: # # Lambda function containing logic that evaluates compliance with the rule. 
# eval_compliance_fn = lambda_.Function(self, "CustomFunction", @@ -59,8 +58,8 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: # raise RdkRuleTypesInvalidError(f"Error loading parameters file for Rule {rule_name}") if "Remediation" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["Remediation"]: - arg = RemediationConfiguration(rule_parameters=rule_parameters) - config.CfnRemediationConfiguration(self, "MyCfnRemediationConfiguration", config_rule_name=generated_rule_name, **asdict(arg)) + arg = RemediationConfiguration(rule_parameters=rule_parameters, config_rule_name = rule_name) + config.CfnRemediationConfiguration(self, "MyCfnRemediationConfiguration", **asdict(arg)) # # A rule to detect stack drifts # drift_rule = config.CloudFormationStackDriftDetectionCheck(self, "Drift") diff --git a/rdk/frameworks/cdk/cdk/core/config/custom_policy.py b/rdk/frameworks/cdk/cdk/core/config/custom_policy.py index 5768840b..40719467 100644 --- a/rdk/frameworks/cdk/cdk/core/config/custom_policy.py +++ b/rdk/frameworks/cdk/cdk/core/config/custom_policy.py @@ -16,7 +16,7 @@ class CustomPolicy: * **`policy_text`** (_str_): The policy definition containing the logic for your AWS Config Custom Policy rule. * **`enable_debug_log`** (_bool_): Optional - The boolean expression for enabling debug logging for your AWS Config Custom Policy rule. Default: false - * **`config_rule_name`** (_str_): Optional - A name for the AWS Config rule. Default: - CloudFormation generated name + * **`config_rule_name`** (_str_): A name for the AWS Config rule. Default: - CloudFormation generated name * **`description`** (_str_): Optional - A description about this AWS Config rule. Default: - No description * **`input_parameters`** (_Dict[str, Any]_): Optional - Input parameter values that are passed to the AWS Config rule. Default: - No input parameters * **`maximum_execution_frequency`** (_MaximumExecutionFrequency_): Optional - The maximum frequency at which the AWS Config rule runs evaluations. Default: MaximumExecutionFrequency.TWENTY_FOUR_HOURS @@ -26,7 +26,7 @@ class CustomPolicy: policy_text: str = field(init=False) enable_debug_log: Optional[bool] = False - config_rule_name: Optional[str] = None + config_rule_name: str = field(init=False) description: Optional[str] = None input_parameters: Optional[Dict[str, Any]] = None maximum_execution_frequency: Optional[config.MaximumExecutionFrequency] = config.MaximumExecutionFrequency.TWENTY_FOUR_HOURS @@ -35,6 +35,8 @@ class CustomPolicy: def __init__(self, policy_text: str, rule_parameters: dict): param = rule_parameters["Parameters"] self.policy_text = policy_text + if "RuleName" in param: + self.config_rule_name = param["RuleName"] if "EnableDebugLogDelivery" in param: self.enable_debug_log = True if "Description" in param: diff --git a/rdk/frameworks/cdk/cdk/core/config/managed_rule.py b/rdk/frameworks/cdk/cdk/core/config/managed_rule.py index 9f604329..97043a1d 100644 --- a/rdk/frameworks/cdk/cdk/core/config/managed_rule.py +++ b/rdk/frameworks/cdk/cdk/core/config/managed_rule.py @@ -15,7 +15,7 @@ class ManagedRule: Parameters: * **`identifier`** (_str_): The policy definition containing the logic for your AWS Config Custom Policy rule. - * **`config_rule_name`** (_str_): Optional - A name for the AWS Config rule. Default: - CloudFormation generated name + * **`config_rule_name`** (_str_): A name for the AWS Config rule. 
Default: - CloudFormation generated name * **`description`** (_str_): Optional - A description about this AWS Config rule. Default: - No description * **`input_parameters`** (_Dict[str, Any]_): Optional - Input parameter values that are passed to the AWS Config rule. Default: - No input parameters * **`maximum_execution_frequency`** (_MaximumExecutionFrequency_): Optional - The maximum frequency at which the AWS Config rule runs evaluations. Default: MaximumExecutionFrequency.TWENTY_FOUR_HOURS @@ -24,7 +24,7 @@ class ManagedRule: """ identifier: config.ManagedRuleIdentifiers = field(init=False) - config_rule_name: Optional[str] = None + config_rule_name: str = field(init=False) description: Optional[str] = None input_parameters: Optional[Dict[str, Any]] = None maximum_execution_frequency: Optional[config.MaximumExecutionFrequency] = config.MaximumExecutionFrequency.TWENTY_FOUR_HOURS @@ -32,6 +32,8 @@ class ManagedRule: def __init__(self, rule_parameters: dict): param = rule_parameters["Parameters"] + if "RuleName" in param: + self.config_rule_name = param["RuleName"] if param["SourceIdentifier"]: identifier = param["SourceIdentifier"].upper().replace("-", "_") try: @@ -61,7 +63,7 @@ def __init__(self, rule_parameters: dict): self.maximum_execution_frequency = maximum_execution_frequency if "SourceEvents" in param: try: - source_events = getattr(config.ResourceType, param["SourceEvents"].upper().replace("AWS::", "").replace("::", "_")) + source_events = getattr(config.ResourceType, param["SourceEvents"].upper().replace("AWS::", "").replace("::", "_").replace("EC2_VOLUME", "EBS_VOLUME")) # cdk use EBS Volume instead of EC2 Volume except: raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceEvents. Please review https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html") self.rule_scope = config.RuleScope.from_resources([source_events]) \ No newline at end of file diff --git a/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py b/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py index 3a814325..2dba4fc6 100644 --- a/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py +++ b/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py @@ -15,6 +15,7 @@ class RemediationConfiguration: Parameters: + * **`config_rule_name `** (_str_): The name of the AWS Config rule. * **`target_id `** (_str_): Target ID is the name of the SSM document. * **`target_type `** (_str_): The type of the target. Target executes remediation. For example, SSM document. * **`automatic`** (_Union[bool, IResolvable, None]_) : Optional - The remediation is triggered automatically. 
@@ -27,6 +28,7 @@ class RemediationConfiguration: """ + config_rule_name: str = field(init=False) target_id: str = field(init=False) target_type: str = field(init=False) automatic: Optional[Union[bool, IResolvable, None]] = None @@ -42,6 +44,8 @@ def __init__(self, rule_parameters: dict): param = rule_parameters["Parameters"]['Remediation'] self.target_id = param["TargetId"] self.target_type = param["TargetType"] + if "RuleName" in rule_parameters["Parameters"]: + self.config_rule_name = rule_parameters["Parameters"]["RuleName"] if "Automatic" in param: self.automatic = param["Automatic"] if "ExecutionControls" in param: From 210d39ae1cff946c39250d5ededff9d3164cd5a9 Mon Sep 17 00:00:00 2001 From: Ricky Chau Date: Thu, 20 Apr 2023 16:02:56 -0400 Subject: [PATCH 08/23] feat(destroy): add support for destroy --- rdk/cli/commands/destroy.py | 15 +++++++ rdk/cli/main.py | 32 +++++++++++++- rdk/core/rules_deploy.py | 18 +++++++- rdk/core/rules_test.py | 2 +- rdk/frameworks/cdk/cdk/cdk_stack.py | 10 ++--- .../cdk/cdk/core/config/custom_policy.py | 5 +-- .../cdk/cdk/core/config/managed_rule.py | 4 +- .../core/config/remediation_configuration.py | 43 ++++++++++--------- rdk/runners/cdk.py | 33 ++++++++++++++ 9 files changed, 127 insertions(+), 35 deletions(-) create mode 100644 rdk/cli/commands/destroy.py diff --git a/rdk/cli/commands/destroy.py b/rdk/cli/commands/destroy.py new file mode 100644 index 00000000..c9fc12e8 --- /dev/null +++ b/rdk/cli/commands/destroy.py @@ -0,0 +1,15 @@ +import sys +from typing import Any, Callable, Dict, List, Optional + +from rdk.core.rules_deploy import RulesDeploy +from rdk.utils.logger import get_main_logger + + +def run(rulenames: List[str], dryrun: bool): + """ + test sub-command handler. + """ + logger = get_main_logger() + logger.info("Destroying RDK rules ...") + + sys.exit(RulesDeploy(rulenames=rulenames, dryrun=dryrun).destroy()) diff --git a/rdk/cli/main.py b/rdk/cli/main.py index a6c7aaf1..3e3c209b 100644 --- a/rdk/cli/main.py +++ b/rdk/cli/main.py @@ -7,6 +7,7 @@ import rdk.cli.commands.deploy as deploy_cmd import rdk.cli.commands.init as init_cmd import rdk.cli.commands.test as test_cmd +import rdk.cli.commands.destroy as destroy_cmd import rdk.utils.logger as rdk_logger @@ -104,6 +105,28 @@ def main(): help="Verbose mode", ) + # destroy + commands_parser_destroy = commands_parser.add_parser( + "destroy", + help="destroy AWS Config Rules", + ) + + commands_parser_destroy.add_argument( + "rulename", + metavar="", + nargs="*", + default="", + help="Rule name(s) to destroy. Rule(s) will be removed." + ) + + commands_parser_destroy.add_argument( + "-n", + "--dryrun", + action="store_true", + default=False, + help="Dry run mode", + ) + # _pytest -- hidden command used by pytests commands_parser.add_parser( "_pytest", @@ -136,9 +159,16 @@ def main(): dryrun=args.dryrun, ) - # handle: deploy + # handle: test if args.command == "test": test_cmd.run( rulenames=args.rulename, verbose=args.verbose, + ) + + # handle: destroy + if args.command == "destroy": + destroy_cmd.run( + rulenames=args.rulename, + dryrun=args.dryrun, ) \ No newline at end of file diff --git a/rdk/core/rules_deploy.py b/rdk/core/rules_deploy.py index 79eac1ad..a87622e5 100644 --- a/rdk/core/rules_deploy.py +++ b/rdk/core/rules_deploy.py @@ -41,5 +41,19 @@ def run(self): ) cdk_runner.synthesize() - # cdk_runner.bootstrap() - # cdk_runner.deploy() \ No newline at end of file + cdk_runner.bootstrap() + cdk_runner.deploy() + + def destroy(self): + """ + Destroy Rules Deployment. 
+ """ + if len(self.rulenames) > 0: + rules_dir = Path(self.rulenames[0]) + else: + rules_dir=Path().absolute() + + cdk_runner = CdkRunner( + rules_dir=rules_dir + ) + cdk_runner.destroy() diff --git a/rdk/core/rules_test.py b/rdk/core/rules_test.py index 0b4dad8d..01e1a61b 100644 --- a/rdk/core/rules_test.py +++ b/rdk/core/rules_test.py @@ -72,7 +72,7 @@ def run(self): elif runtime in ["cloudformation-guard2.0", "guard-2.x.x"]: test_report["cfn_guard_results"] += self._run_cfn_guard_test(test_dir) else: - self.logger.info(f"Skipping {rule_name} - The Custom Rule Runtime or Managed Rule are not supported for unit testing.") + self.logger.info(f"Skipping {rule_name} - The Custom Rule Runtime provided or Managed Rule is not supported for unit testing.") exit(self._result_summary(test_report)) diff --git a/rdk/frameworks/cdk/cdk/cdk_stack.py b/rdk/frameworks/cdk/cdk/cdk_stack.py index d74a8545..55a8a944 100644 --- a/rdk/frameworks/cdk/cdk/cdk_stack.py +++ b/rdk/frameworks/cdk/cdk/cdk_stack.py @@ -33,11 +33,11 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: rule_parameters = get_rule_parameters(rule_path) if "SourceRuntime" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["SourceRuntime"] in ["cloudformation-guard2.0", "guard-2.x.x"]: - arg = CustomPolicy(policy_text=rule_path.joinpath("rule_code.guard").read_text(), rule_parameters=rule_parameters, config_rule_name = rule_name) - config.CustomPolicy(self, rule_name, **asdict(arg)).config_rule_name + arg = CustomPolicy(policy_text=rule_path.joinpath("rule_code.guard").read_text(), rule_parameters=rule_parameters) + config.CustomPolicy(self, rule_name, **asdict(arg)) elif "SourceIdentifier" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["SourceIdentifier"]: - arg = ManagedRule(rule_parameters=rule_parameters, config_rule_name = rule_name) - config.ManagedRule(self, rule_name, **asdict(arg)).config_rule_name + arg = ManagedRule(rule_parameters=rule_parameters) + config.ManagedRule(self, rule_name, **asdict(arg)) # elif rule_parameters["Parameters"]["SourceRuntime"] in rdk_supported_custom_rule_runtime: # # Lambda function containing logic that evaluates compliance with the rule. # eval_compliance_fn = lambda_.Function(self, "CustomFunction", @@ -58,7 +58,7 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: # raise RdkRuleTypesInvalidError(f"Error loading parameters file for Rule {rule_name}") if "Remediation" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["Remediation"]: - arg = RemediationConfiguration(rule_parameters=rule_parameters, config_rule_name = rule_name) + arg = RemediationConfiguration(rule_parameters=rule_parameters) config.CfnRemediationConfiguration(self, "MyCfnRemediationConfiguration", **asdict(arg)) # # A rule to detect stack drifts # drift_rule = config.CloudFormationStackDriftDetectionCheck(self, "Drift") diff --git a/rdk/frameworks/cdk/cdk/core/config/custom_policy.py b/rdk/frameworks/cdk/cdk/core/config/custom_policy.py index 40719467..731ca075 100644 --- a/rdk/frameworks/cdk/cdk/core/config/custom_policy.py +++ b/rdk/frameworks/cdk/cdk/core/config/custom_policy.py @@ -19,7 +19,7 @@ class CustomPolicy: * **`config_rule_name`** (_str_): A name for the AWS Config rule. Default: - CloudFormation generated name * **`description`** (_str_): Optional - A description about this AWS Config rule. 
Default: - No description * **`input_parameters`** (_Dict[str, Any]_): Optional - Input parameter values that are passed to the AWS Config rule. Default: - No input parameters - * **`maximum_execution_frequency`** (_MaximumExecutionFrequency_): Optional - The maximum frequency at which the AWS Config rule runs evaluations. Default: MaximumExecutionFrequency.TWENTY_FOUR_HOURS + * **`maximum_execution_frequency`** (_MaximumExecutionFrequency_): Optional - The maximum frequency at which the AWS Config rule runs evaluations. * **`rule_scope`** (_RuleScope_): Optional - Defines which resources trigger an evaluation for an AWS Config rule. Default: - evaluations for the rule are triggered when any resource in the recording group changes. """ @@ -29,7 +29,7 @@ class CustomPolicy: config_rule_name: str = field(init=False) description: Optional[str] = None input_parameters: Optional[Dict[str, Any]] = None - maximum_execution_frequency: Optional[config.MaximumExecutionFrequency] = config.MaximumExecutionFrequency.TWENTY_FOUR_HOURS + maximum_execution_frequency: Optional[config.MaximumExecutionFrequency] = None rule_scope: Optional[config.RuleScope] = None def __init__(self, policy_text: str, rule_parameters: dict): @@ -40,7 +40,6 @@ def __init__(self, policy_text: str, rule_parameters: dict): if "EnableDebugLogDelivery" in param: self.enable_debug_log = True if "Description" in param: - print(param["Description"]) self.description = param["Description"] if "InputParameters" in param: self.input_parameters = json.loads(param["InputParameters"]) diff --git a/rdk/frameworks/cdk/cdk/core/config/managed_rule.py b/rdk/frameworks/cdk/cdk/core/config/managed_rule.py index 97043a1d..9869bed6 100644 --- a/rdk/frameworks/cdk/cdk/core/config/managed_rule.py +++ b/rdk/frameworks/cdk/cdk/core/config/managed_rule.py @@ -18,7 +18,7 @@ class ManagedRule: * **`config_rule_name`** (_str_): A name for the AWS Config rule. Default: - CloudFormation generated name * **`description`** (_str_): Optional - A description about this AWS Config rule. Default: - No description * **`input_parameters`** (_Dict[str, Any]_): Optional - Input parameter values that are passed to the AWS Config rule. Default: - No input parameters - * **`maximum_execution_frequency`** (_MaximumExecutionFrequency_): Optional - The maximum frequency at which the AWS Config rule runs evaluations. Default: MaximumExecutionFrequency.TWENTY_FOUR_HOURS + * **`maximum_execution_frequency`** (_MaximumExecutionFrequency_): Optional - The maximum frequency at which the AWS Config rule runs evaluations. * **`rule_scope`** (_RuleScope_): Optional - Defines which resources trigger an evaluation for an AWS Config rule. Default: - evaluations for the rule are triggered when any resource in the recording group changes. 
""" @@ -27,7 +27,7 @@ class ManagedRule: config_rule_name: str = field(init=False) description: Optional[str] = None input_parameters: Optional[Dict[str, Any]] = None - maximum_execution_frequency: Optional[config.MaximumExecutionFrequency] = config.MaximumExecutionFrequency.TWENTY_FOUR_HOURS + maximum_execution_frequency: Optional[config.MaximumExecutionFrequency] = None rule_scope: Optional[config.RuleScope] = None def __init__(self, rule_parameters: dict): diff --git a/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py b/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py index 2dba4fc6..87b576a7 100644 --- a/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py +++ b/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py @@ -41,31 +41,32 @@ class RemediationConfiguration: def __init__(self, rule_parameters: dict): - param = rule_parameters["Parameters"]['Remediation'] - self.target_id = param["TargetId"] - self.target_type = param["TargetType"] + param = rule_parameters["Parameters"] + reme_param = param['Remediation'] + self.target_id = reme_param["TargetId"] + self.target_type = reme_param["TargetType"] if "RuleName" in rule_parameters["Parameters"]: self.config_rule_name = rule_parameters["Parameters"]["RuleName"] - if "Automatic" in param: - self.automatic = param["Automatic"] - if "ExecutionControls" in param: - if "SsmControls" in param["ExecutionControls"]: - if "ConcurrentExecutionRatePercentage" in param["ExecutionControls"]["SsmControls"]: - concurrent_execution_rate_percentage = param["ExecutionControls"]["SsmControls"]["ConcurrentExecutionRatePercentage"] - if "ErrorPercentage" in param["ExecutionControls"]["SsmControls"]: - error_percentage = param["ExecutionControls"]["SsmControls"]["ErrorPercentage"] + if "Automatic" in reme_param: + self.automatic = reme_param["Automatic"] + if "ExecutionControls" in reme_param: + if "SsmControls" in reme_param["ExecutionControls"]: + if "ConcurrentExecutionRatePercentage" in reme_param["ExecutionControls"]["SsmControls"]: + concurrent_execution_rate_percentage = reme_param["ExecutionControls"]["SsmControls"]["ConcurrentExecutionRatePercentage"] + if "ErrorPercentage" in reme_param["ExecutionControls"]["SsmControls"]: + error_percentage = reme_param["ExecutionControls"]["SsmControls"]["ErrorPercentage"] ssm_controls = config.CfnRemediationConfiguration.SsmControlsProperty( concurrent_execution_rate_percentage=concurrent_execution_rate_percentage, error_percentage=error_percentage ) self.execution_controls = config.CfnRemediationConfiguration.ExecutionControlsProperty(ssm_controls) - if "MaximumAutomaticAttempts" in param: - self.maximum_automatic_attempts = int(param["MaximumAutomaticAttempts"]) - if "Parameters" in param: - self.parameters = param["Parameters"] - if "ResourceType" in param: - self.resource_type = param["ResourceType"] - if "RetryAttemptSeconds" in param: - self.retry_attempt_seconds = int(param["RetryAttemptSeconds"]) - if "TargetVersion" in param: - self.target_version = param["TargetVersion"] + if "MaximumAutomaticAttempts" in reme_param: + self.maximum_automatic_attempts = int(reme_param["MaximumAutomaticAttempts"]) + if "Parameters" in reme_param: + self.parameters = reme_param["Parameters"] + if "ResourceType" in reme_param: + self.resource_type = reme_param["ResourceType"] + if "RetryAttemptSeconds" in reme_param: + self.retry_attempt_seconds = int(reme_param["RetryAttemptSeconds"]) + if "TargetVersion" in reme_param: + self.target_version = reme_param["TargetVersion"] 
diff --git a/rdk/runners/cdk.py b/rdk/runners/cdk.py index 2e223560..75326dc1 100644 --- a/rdk/runners/cdk.py +++ b/rdk/runners/cdk.py @@ -46,6 +46,15 @@ def synthesize(self): cmd = [ "cdk", "synth", + "--validation", + "--output", + self.rules_dir.joinpath("build/").as_posix(), + # "--version-reporting", + # "false", + # "--path-metadata", + # "false", + # "--asset-metadata", + # "false", "--context", "rules_dir=" + self.rules_dir.as_posix() ] @@ -101,6 +110,30 @@ def deploy(self): self.logger.info("Deploying AWS Config Rules ...") + self.run_cmd( + cmd=cmd, + cwd=self.cdk_app_dir.as_posix(), + allowed_return_codes=[0, 2], + ) + + def destroy(self): + """ + Executes `cdk destroy`. + + Parameters: + + """ + cmd = [ + "cdk", + "destroy", + "--context", + "rules_dir=" + self.rules_dir.as_posix(), + "--force" + ] + + + self.logger.info("Destroying AWS Config Rules ...") + self.run_cmd( cmd=cmd, cwd=self.cdk_app_dir.as_posix(), From 2fa511e11c771782214607ac627f2f4a0a3d8896 Mon Sep 17 00:00:00 2001 From: Ricky Chau Date: Thu, 20 Apr 2023 17:49:52 -0400 Subject: [PATCH 09/23] feat(tests): add testing rules --- README.md | 16 ++ .../AMI_DEPRECATED_CHECK.py | 155 ++++++++++++ .../AMI_DEPRECATED_CHECK_test.py | 229 ++++++++++++++++++ .../AMI_DEPRECATED_CHECK/parameters.json | 13 + .../API_GATEWAY_PRIVATE/parameters.json | 45 ++++ .../API_GATEWAY_PRIVATE/rule_code.guard | 39 +++ .../API_GATEWAY_PRIVATE/tests/FAIL.yaml | 20 ++ .../API_GATEWAY_PRIVATE/tests/PASS.yaml | 12 + .../API_GATEWAY_PRIVATE/tests/SKIP.yaml | 12 + .../ENCRYPTED_VOLUMES/parameters.json | 15 ++ .../parameters.json | 15 ++ .../parameters.json | 12 + .../rule_code.guard | 3 + 13 files changed, 586 insertions(+) create mode 100644 tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/AMI_DEPRECATED_CHECK.py create mode 100644 tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/AMI_DEPRECATED_CHECK_test.py create mode 100644 tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/parameters.json create mode 100644 tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/parameters.json create mode 100644 tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/rule_code.guard create mode 100644 tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/tests/FAIL.yaml create mode 100644 tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/tests/PASS.yaml create mode 100644 tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/tests/SKIP.yaml create mode 100644 tests/integration/rdk-cdk-rule-dir/ENCRYPTED_VOLUMES/parameters.json create mode 100644 tests/integration/rdk-cdk-rule-dir/S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED/parameters.json create mode 100644 tests/integration/rdk-cdk-rule-dir/SNS_TOPIC_IN_US_EAST_1_ONLY/parameters.json create mode 100644 tests/integration/rdk-cdk-rule-dir/SNS_TOPIC_IN_US_EAST_1_ONLY/rule_code.guard diff --git a/README.md b/README.md index e69de29b..b81187fd 100644 --- a/README.md +++ b/README.md @@ -0,0 +1,16 @@ + + +# Steps to setup your local environment +make freeze +make init + +# Editorable mode by activate pipenv +pipenv shell + +# Navigagte to rules dir in integration test +cd tests/rdk-cdk-int-rules-dir + +# Run RDK command for testing +rdk test +rdk deploy +rdk destroy \ No newline at end of file diff --git a/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/AMI_DEPRECATED_CHECK.py b/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/AMI_DEPRECATED_CHECK.py new file mode 100644 index 00000000..29b7a009 --- /dev/null +++ 
b/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/AMI_DEPRECATED_CHECK.py @@ -0,0 +1,155 @@ +# +# This file made available under CC0 1.0 Universal (https://creativecommons.org/publicdomain/zero/1.0/legalcode) +# +# Created with the Rule Development Kit: https://github.com/awslabs/aws-config-rdk +# Can be used stand-alone or with the Rule Compliance Engine: https://github.com/awslabs/aws-config-engine-for-compliance-as-code +# + +from datetime import datetime +from rdklib import Evaluator, Evaluation, ConfigRule, ComplianceType + +APPLICABLE_RESOURCES = ["AWS::AutoScaling::AutoScalingGroup", "AWS::EC2::Instance"] +DEFAULT_RESOURCE_TYPE = "AWS::EC2::Instance" + +class AMI_DEPRECATED_CHECK(ConfigRule): + def evaluate_change(self, event, client_factory, configuration_item, valid_rule_parameters): + pass + + def evaluate_periodic(self, event, client_factory, valid_rule_parameters): + ec2_client = client_factory.build_client("ec2") + asg_client = client_factory.build_client("autoscaling") + + mode = valid_rule_parameters['mode'] + if mode == 'ASG': + return self.evaluate_asgs(ec2_client, asg_client) + return self.evaluate_instances(ec2_client) + + def evaluate_parameters(self, rule_parameters): + valid_rule_parameters = rule_parameters + if 'mode' not in rule_parameters: + valid_rule_parameters['mode'] = 'EC2' + if valid_rule_parameters['mode'] not in ('EC2', 'ASG'): + raise ValueError('Rule only supports parameter mode of EC2 and ASG') + return valid_rule_parameters + + def evaluate_instances(self, ec2_client): + evaluations = [] + instances = get_all_instances(ec2_client) + for instance in instances: + ami_id = instance['ImageId'] + + compliance_type, annotation = self.evaluate_ami(ec2_client, ami_id) + evaluation = Evaluation( + resourceType='AWS::EC2::Instance', + resourceId=instance['InstanceId'], + complianceType=compliance_type, + annotation=annotation, + ) + evaluations.append(evaluation) + + return evaluations + + def evaluate_asgs(self, ec2_client, asg_client): + evaluations = [] + asgs = get_all_asgs(asg_client) + for asg in asgs: + ami_id = get_ami_from_asg(asg_client, ec2_client, asg) + + compliance_type, annotation = self.evaluate_ami(ec2_client, ami_id) + evaluation = Evaluation( + resourceType='AWS::AutoScaling::AutoScalingGroup', + resourceId=asg['AutoScalingGroupName'], + complianceType=compliance_type, + annotation=annotation, + ) + evaluations.append(evaluation) + + return evaluations + + def evaluate_ami(self, ec2_client, ami_id): + if not ami_id: + print(f'AMI {ami_id} is None, assuming deprecated/unshared/deleted') + return ComplianceType.NON_COMPLIANT, f'Image {ami_id} is either unshared or deleted' + try: + response = ec2_client.describe_images( + ImageIds=[ami_id], + IncludeDeprecated=True, + ) + image = response['Images'][0] + if 'DeprecationTime' not in image: + return ComplianceType.COMPLIANT, f'Image {ami_id} is not deprecated' + deprecation_time = datetime.strptime(image['DeprecationTime'], '%Y-%m-%dT%H:%M:%S.%fZ') + current_time = datetime.utcnow() + if deprecation_time < current_time: + return ComplianceType.NON_COMPLIANT, f'Image {ami_id} is deprecated' + return ComplianceType.COMPLIANT, f'Image {ami_id} is not deprecated' + except Exception as e: + print(f'Exception checking {ami_id}, assuming deprecated/unshared/deleted: {e}') + return ComplianceType.NON_COMPLIANT, f'Error checking {ami_id}, assuming noncompliant' + + +def get_ami_from_asg(asg_client, ec2_client, asg): + # asg is the individual asg metadata from the AWS API + try: + if 
'MixedInstancesPolicy' in asg: + launch_template_spec = asg['MixedInstancesPolicy']['LaunchTemplate'] \ + ['LaunchTemplateSpecification'] + response = ec2_client.describe_launch_template_versions( + LaunchTemplateId = launch_template_spec['LaunchTemplateId'], + Versions = [launch_template_spec['Version']] + ) + return response['LaunchTemplateVersions'][0]['LaunchTemplateData']['ImageId'] + elif 'LaunchTemplate' in asg: + launch_template_spec = asg['LaunchTemplate'] + response = ec2_client.describe_launch_template_versions( + LaunchTemplateId = launch_template_spec['LaunchTemplateId'], + Versions = [launch_template_spec['Version']] + ) + return response['LaunchTemplateVersions'][0]['LaunchTemplateData']['ImageId'] + else: + launch_config_name = asg['LaunchConfigurationName'] + response = asg_client.describe_launch_configurations( + LaunchConfigurationNames = [launch_config_name] + ) + return response['LaunchConfigurations'][0]['ImageId'] + except Exception as e: + asg_name = asg.get('AutoScalingGroupName', 'Unknown') + print(f'Error retrieving AMI from ASG {asg_name}: {e}') + return None + +def get_all_asgs(asg_client): + asgs = [] + response = asg_client.describe_auto_scaling_groups() + asgs.extend(response['AutoScalingGroups']) + while 'NextToken' in response: + response = asg_client.describe_auto_scaling_groups(response['NextToken']) + asgs.extend(response['AutoScalingGroups']) + return asgs + +def get_all_instances(ec2_client): + instances = [] + # Get all instances with pagination + response = ec2_client.describe_instances( + Filters=[ + { + 'Name': 'instance-state-name', + 'Values': ['pending', 'running', 'stopping', 'stopped'] + }, + ], + ) + for reservation in response["Reservations"]: + instances.extend(reservation["Instances"]) + while 'NextToken' in response: + response = ec2_client.describe_instances(NextToken=response['NextToken']) + for reservation in response["Reservations"]: + instances.extend(reservation["Instances"]) + return instances + + +################################ +# DO NOT MODIFY ANYTHING BELOW # +################################ +def lambda_handler(event, context): + my_rule = AMI_DEPRECATED_CHECK() + evaluator = Evaluator(my_rule) + return evaluator.handle(event, context) \ No newline at end of file diff --git a/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/AMI_DEPRECATED_CHECK_test.py b/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/AMI_DEPRECATED_CHECK_test.py new file mode 100644 index 00000000..ad0ff3b3 --- /dev/null +++ b/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/AMI_DEPRECATED_CHECK_test.py @@ -0,0 +1,229 @@ +# +# This file made available under CC0 1.0 Universal (https://creativecommons.org/publicdomain/zero/1.0/legalcode) +# +# Created with the Rule Development Kit: https://github.com/awslabs/aws-config-rdk +# Can be used stand-alone or with the Rule Compliance Engine: https://github.com/awslabs/aws-config-engine-for-compliance-as-code +# + +import unittest +from unittest.mock import patch, MagicMock +from botocore.exceptions import ClientError +import rdklib +from rdklib import Evaluation, ComplianceType +from rdklibtest import assert_successful_evaluation + +############# +# Main Code # +############# + +MODULE = __import__('AMI_DEPRECATED_CHECK') +RULE = MODULE.AMI_DEPRECATED_CHECK() + +#example for mocking S3 API calls +CLIENT_FACTORY = MagicMock() +EC2_CLIENT_MOCK = MagicMock() +ASG_CLIENT_MOCK = MagicMock() + +def mock_get_client(client_name, *args, **kwargs): + if client_name == "ec2": + return EC2_CLIENT_MOCK + 
elif client_name == 'autoscaling': + return ASG_CLIENT_MOCK + raise Exception("Attempting to create an unknown client") + + +def mock_evaluator_handle(event, context): + return f"Event: {event} - Context: {context}" + + +@patch.object(CLIENT_FACTORY, "build_client", MagicMock(side_effect=mock_get_client)) +class ComplianceTest(unittest.TestCase): + + deprecated_ami_response = { + "Images": [ + { + "CreationDate": "2021-07-19T19:03:00.000Z", + "ImageId": "ami-abcd1234", + "Name": "test-image", + "DeprecationTime": "2021-07-21T17:03:00.000Z" + } + ] + } + + compliant_ami_response = { + "Images": [ + { + "CreationDate": "2021-07-01T19:03:00.000Z", + "ImageId": "ami-abcd1234", + "Name": "test-image" + } + ] + } + + missing_ami_response = {'Images': []} + + instance_response = { + "Reservations": [ + { + "Groups": [], + "Instances": [ + { + "ImageId": "ami-abcd1234", + "InstanceId": "i-abcd1234" + } + ] + } + ] + } + + asg_launch_template = { + "AutoScalingGroups": [ + { + "AutoScalingGroupName": "test-asg", + "LaunchTemplate": { + "LaunchTemplateId": "lt-xyz789", + "LaunchTemplateName": "test-lt", + "Version": "1" + } + } + ] + } + + asg_mixed_instances = { + "AutoScalingGroups": [ + { + "AutoScalingGroupName": "test-asg", + "MixedInstancesPolicy": { + "LaunchTemplate": { + "LaunchTemplateSpecification": { + "LaunchTemplateId": "lt-xyz789", + "LaunchTemplateName": "test-lt", + "Version": 1 + } + } + } + } + ] + } + + asg_launch_config = { + "AutoScalingGroups": [ + { + "AutoScalingGroupName": "test-asg", + "LaunchConfigurationName": "test-lc" + } + ] + } + + launch_template_versions = { + 'LaunchTemplateVersions': [ + { + 'LaunchTemplateData': { + 'ImageId': 'ami-6057e21a' + }, + 'LaunchTemplateId': "lt-xyz789", + 'LaunchTemplateName': "test-lt", + 'VersionNumber': 2, + } + ] + } + + launch_config = { + "LaunchConfigurations": [ + { + "LaunchConfigurationName": "test-lc", + "ImageId": "ami-abcd1234" + } + ] + } + + def setUp(self): + EC2_CLIENT_MOCK.reset_mock() + ASG_CLIENT_MOCK.reset_mock() + + def test_evaluate_compliant_instance(self): + EC2_CLIENT_MOCK.describe_instances.return_value = self.instance_response + EC2_CLIENT_MOCK.describe_images.return_value = self.compliant_ami_response + response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {'mode': 'EC2'}) + instance = self.instance_response['Reservations'][0]['Instances'][0] + response_expected = [Evaluation( + complianceType=ComplianceType.COMPLIANT, + resourceId=instance['InstanceId'], + resourceType='AWS::EC2::Instance', + annotation=f'Image {instance["ImageId"]} is not deprecated' + )] + assert_successful_evaluation(self, response, response_expected) + + def test_evaluate_noncompliant_instance_deprecated_ami(self): + EC2_CLIENT_MOCK.describe_instances.return_value = self.instance_response + EC2_CLIENT_MOCK.describe_images.return_value = self.deprecated_ami_response + response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {'mode': 'EC2'}) + instance = self.instance_response['Reservations'][0]['Instances'][0] + response_expected = [Evaluation( + complianceType=ComplianceType.NON_COMPLIANT, + resourceId=instance['InstanceId'], + resourceType='AWS::EC2::Instance', + annotation=f'Image {instance["ImageId"]} is deprecated' + )] + assert_successful_evaluation(self, response, response_expected) + + def test_evaluate_noncompliant_instance_missing_ami(self): + EC2_CLIENT_MOCK.describe_instances.return_value = self.instance_response + EC2_CLIENT_MOCK.describe_images.return_value = self.missing_ami_response + response = RULE.evaluate_periodic({}, 
CLIENT_FACTORY, {'mode': 'EC2'}) + instance = self.instance_response['Reservations'][0]['Instances'][0] + response_expected = [Evaluation( + complianceType=ComplianceType.NON_COMPLIANT, + resourceId=instance['InstanceId'], + resourceType='AWS::EC2::Instance', + annotation=f'Error checking {instance["ImageId"]}, assuming noncompliant' + )] + assert_successful_evaluation(self, response, response_expected) + + def test_evaluate_asg_mixed_instances_launch_template_compliant(self): + ASG_CLIENT_MOCK.describe_auto_scaling_groups.return_value = self.asg_mixed_instances + EC2_CLIENT_MOCK.describe_launch_template_versions.return_value = self.launch_template_versions + EC2_CLIENT_MOCK.describe_images.return_value = self.compliant_ami_response + response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {'mode': 'ASG'}) + asg = self.asg_mixed_instances['AutoScalingGroups'][0] + launch_template_version = self.launch_template_versions['LaunchTemplateVersions'][0] + response_expected = [Evaluation( + complianceType=ComplianceType.COMPLIANT, + resourceId=asg['AutoScalingGroupName'], + resourceType='AWS::AutoScaling::AutoScalingGroup', + annotation=f'Image {launch_template_version["LaunchTemplateData"]["ImageId"]} is not deprecated' + )] + assert_successful_evaluation(self, response, response_expected) + + def test_evaluate_noncompliant_asg_launch_config_deprecated_ami(self): + ASG_CLIENT_MOCK.describe_auto_scaling_groups.return_value = self.asg_launch_config + ASG_CLIENT_MOCK.describe_launch_configurations.return_value = self.launch_config + EC2_CLIENT_MOCK.describe_images.return_value = self.deprecated_ami_response + response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {'mode': 'ASG'}) + asg = self.asg_launch_config['AutoScalingGroups'][0] + launch_config = self.launch_config['LaunchConfigurations'][0] + response_expected = [Evaluation( + complianceType=ComplianceType.NON_COMPLIANT, + resourceId=asg['AutoScalingGroupName'], + resourceType='AWS::AutoScaling::AutoScalingGroup', + annotation=f'Image {launch_config["ImageId"]} is deprecated' + )] + assert_successful_evaluation(self, response, response_expected) + + def test_evaluate_noncompliant_asg_launch_template_missing_ami(self): + ASG_CLIENT_MOCK.describe_auto_scaling_groups.return_value = self.asg_launch_template + ASG_CLIENT_MOCK.describe_launch_template_versions.return_value = self.launch_template_versions + EC2_CLIENT_MOCK.describe_images.return_value = self.missing_ami_response + response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {'mode': 'ASG'}) + asg = self.asg_launch_template['AutoScalingGroups'][0] + launch_template_version = self.launch_template_versions['LaunchTemplateVersions'][0] + response_expected = [Evaluation( + complianceType=ComplianceType.NON_COMPLIANT, + resourceId=asg['AutoScalingGroupName'], + resourceType='AWS::AutoScaling::AutoScalingGroup', + annotation=f'Error checking {launch_template_version["LaunchTemplateData"]["ImageId"]}, assuming noncompliant' + )] + assert_successful_evaluation(self, response, response_expected) + +if __name__ == '__main__': + unittest.main() \ No newline at end of file diff --git a/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/parameters.json b/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/parameters.json new file mode 100644 index 00000000..0d0175ca --- /dev/null +++ b/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/parameters.json @@ -0,0 +1,13 @@ +{ + "Version": "1.0", + "Parameters": { + "RuleName": "AMI_DEPRECATED_CHECK", + "Description": "AMI_DEPRECATED_CHECK", + 
"SourceRuntime": "python3.8-lib", + "CodeKey": "AMI_DEPRECATED_CHECK.zip", + "InputParameters": "{}", + "OptionalParameters": "{}", + "SourcePeriodic": "TwentyFour_Hours" + }, + "Tags": "[]" + } \ No newline at end of file diff --git a/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/parameters.json b/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/parameters.json new file mode 100644 index 00000000..90534604 --- /dev/null +++ b/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/parameters.json @@ -0,0 +1,45 @@ +{ + "Version": "1.0", + "Parameters": { + "RuleName": "API_GATEWAY_PRIVATE", + "Description": "API_GATEWAY_PRIVATE", + "SourceRuntime": "guard-2.x.x", + "InputParameters": "{}", + "OptionalParameters": "{}", + "SourcePeriodic": "TwentyFour_Hours", + "Remediation": { + "Automatic": true, + "ConfigRuleName": "TEST_rule", + "MaximumAutomaticAttempts": "2", + "Parameters": { + "AutomationAssumeRole": { + "StaticValue": { + "Values": [ + { "Fn::Sub": "arn:aws:iam::${AWS::AccountId}:role/sns-access" } + ] + } + }, + "Message": { + "StaticValue": { + "Values": [ + "hi" + ] + } + }, + "TopicArn": { + "StaticValue": { + "Values": [ + { "Fn::Sub": "arn:aws:sns:${AWS::Region}:${AWS::AccountId}:rules-notification" } + ] + } + } + }, + "ResourceType": "AWS::EC2::Instance", + "RetryAttemptSeconds": "2", + "TargetId": "AWS-PublishSNSNotification", + "TargetType": "SSM_DOCUMENT", + "TargetVersion": "1" + } + }, + "Tags": "[]" +} diff --git a/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/rule_code.guard b/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/rule_code.guard new file mode 100644 index 00000000..f2654974 --- /dev/null +++ b/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/rule_code.guard @@ -0,0 +1,39 @@ +# +# Select all AWS::ApiGateway::RestApi resources +# present in the Resources section of the template. +# +let api_gws = Resources.*[ Type == 'AWS::ApiGateway::RestApi'] + +# +# Rule intent: +# 1) All AWS::ApiGateway::RestApi resources deployed must be private. +# 2) All AWS::ApiGateway::RestApi resources deployed must have at least one AWS Identity and Access Management (IAM) policy condition key to allow access from a VPC. +# +# Expectations: +# 1) SKIP when there are no AWS::ApiGateway::RestApi resources in the template. +# 2) PASS when: +# ALL AWS::ApiGateway::RestApi resources in the template have the EndpointConfiguration property set to Type: PRIVATE. +# ALL AWS::ApiGateway::RestApi resources in the template have one IAM condition key specified in the Policy property with aws:sourceVpc or :SourceVpc. +# 3) FAIL otherwise. 
+# +# + +rule check_rest_api_is_private when %api_gws !empty { + %api_gws { + Properties.EndpointConfiguration.Types[*] == "PRIVATE" + } +} + +rule check_rest_api_has_vpc_access when check_rest_api_is_private { + %api_gws { + Properties { + # + # ALL AWS::ApiGateway::RestApi resources in the template have one IAM condition key specified in the Policy property with + # aws:sourceVpc or :SourceVpc + # + some Policy.Statement[*] { + Condition.*[ keys == /aws:[sS]ource(Vpc|VPC|Vpce|VPCE)/ ] !empty + } + } + } +} \ No newline at end of file diff --git a/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/tests/FAIL.yaml b/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/tests/FAIL.yaml new file mode 100644 index 00000000..9090fade --- /dev/null +++ b/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/tests/FAIL.yaml @@ -0,0 +1,20 @@ +--- +- name: MyTest3 + input: + Resources: + apiGw: + Type: AWS::ApiGateway::RestApi + expectations: + rules: + check_rest_api_is_private_and_has_access: FAIL +- name: MyTest5 + input: + Resources: + apiGw: + Type: AWS::ApiGateway::RestApi + Properties: + EndpointConfiguration: + Types: [PRIVATE, REGIONAL] + expectations: + rules: + check_rest_api_is_private: FAIL \ No newline at end of file diff --git a/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/tests/PASS.yaml b/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/tests/PASS.yaml new file mode 100644 index 00000000..333377f6 --- /dev/null +++ b/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/tests/PASS.yaml @@ -0,0 +1,12 @@ +--- +- name: MyTest4 + input: + Resources: + apiGw: + Type: AWS::ApiGateway::RestApi + Properties: + EndpointConfiguration: + Types: "PRIVATE" + expectations: + rules: + check_rest_api_is_private: PASS \ No newline at end of file diff --git a/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/tests/SKIP.yaml b/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/tests/SKIP.yaml new file mode 100644 index 00000000..10540118 --- /dev/null +++ b/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/tests/SKIP.yaml @@ -0,0 +1,12 @@ +--- +- name: MyTest1 + input: {} + expectations: + rules: + check_rest_api_is_private_and_has_access: SKIP +- name: MyTest2 + input: + Resources: {} + expectations: + rules: + check_rest_api_is_private_and_has_access: SKIP \ No newline at end of file diff --git a/tests/integration/rdk-cdk-rule-dir/ENCRYPTED_VOLUMES/parameters.json b/tests/integration/rdk-cdk-rule-dir/ENCRYPTED_VOLUMES/parameters.json new file mode 100644 index 00000000..d179f358 --- /dev/null +++ b/tests/integration/rdk-cdk-rule-dir/ENCRYPTED_VOLUMES/parameters.json @@ -0,0 +1,15 @@ +{ + "Version": "1.0", + "Parameters": { + "RuleName": "ENCRYPTED_VOLUMES", + "SourceRuntime": null, + "CodeKey": null, + "InputParameters": "{}", + "OptionalParameters": "{}", + "SourceEvents": "AWS::EC2::Volume", + "SourceIdentifier": "ENCRYPTED_VOLUMES", + "RuleSets": [ + "baseline" + ] + } +} diff --git a/tests/integration/rdk-cdk-rule-dir/S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED/parameters.json b/tests/integration/rdk-cdk-rule-dir/S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED/parameters.json new file mode 100644 index 00000000..7a1d7bff --- /dev/null +++ b/tests/integration/rdk-cdk-rule-dir/S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED/parameters.json @@ -0,0 +1,15 @@ +{ + "Version": "1.0", + "Parameters": { + "RuleName": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED", + "SourceRuntime": null, + "CodeKey": null, + "InputParameters": "{}", + "OptionalParameters": "{}", + "SourceEvents": 
"AWS::S3::Bucket", + "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED", + "RuleSets": [ + "baseline" + ] + } +} diff --git a/tests/integration/rdk-cdk-rule-dir/SNS_TOPIC_IN_US_EAST_1_ONLY/parameters.json b/tests/integration/rdk-cdk-rule-dir/SNS_TOPIC_IN_US_EAST_1_ONLY/parameters.json new file mode 100644 index 00000000..8375592b --- /dev/null +++ b/tests/integration/rdk-cdk-rule-dir/SNS_TOPIC_IN_US_EAST_1_ONLY/parameters.json @@ -0,0 +1,12 @@ +{ + "Version": "1.0", + "Parameters": { + "RuleName": "SNS_TOPIC_IN_US_EAST_1_ONLY", + "Description": "SNS_TOPIC_IN_US_EAST_1_ONLY", + "SourceRuntime": "guard-2.x.x", + "InputParameters": "{}", + "OptionalParameters": "{}", + "SourcePeriodic": "TwentyFour_Hours" + }, + "Tags": "[]" + } \ No newline at end of file diff --git a/tests/integration/rdk-cdk-rule-dir/SNS_TOPIC_IN_US_EAST_1_ONLY/rule_code.guard b/tests/integration/rdk-cdk-rule-dir/SNS_TOPIC_IN_US_EAST_1_ONLY/rule_code.guard new file mode 100644 index 00000000..9ee40f78 --- /dev/null +++ b/tests/integration/rdk-cdk-rule-dir/SNS_TOPIC_IN_US_EAST_1_ONLY/rule_code.guard @@ -0,0 +1,3 @@ +rule MyCustomPolicy when resourceType IN ['AWS::SNS::Topic'] { + awsRegion == "us-east-1" +} \ No newline at end of file From ac4490f3af4c4b2dc51771821c634cff23210501 Mon Sep 17 00:00:00 2001 From: Ricky Chau Date: Fri, 21 Apr 2023 17:09:21 -0400 Subject: [PATCH 10/23] feat(python): add support to custom python rules --- rdk/core/rules_deploy.py | 1 + rdk/frameworks/cdk/cdk/cdk_stack.py | 41 +++++++----- .../cdk/cdk/core/config/custom_policy.py | 5 +- .../cdk/cdk/core/config/custom_rule.py | 64 +++++++++++++++++++ .../cdk/cdk/core/config/lambda_function.py | 46 +++++++++++++ .../cdk/cdk/core/config/managed_rule.py | 5 +- .../core/config/remediation_configuration.py | 2 +- .../cdk/cdk/core/rule_parameters.py | 8 ++- rdk/runners/cdk.py | 23 +++++++ .../AMI_DEPRECATED_CHECK/requirements.txt | 1 + 10 files changed, 170 insertions(+), 26 deletions(-) create mode 100644 rdk/frameworks/cdk/cdk/core/config/custom_rule.py create mode 100644 rdk/frameworks/cdk/cdk/core/config/lambda_function.py create mode 100644 tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/requirements.txt diff --git a/rdk/core/rules_deploy.py b/rdk/core/rules_deploy.py index a87622e5..ee4e20fa 100644 --- a/rdk/core/rules_deploy.py +++ b/rdk/core/rules_deploy.py @@ -40,6 +40,7 @@ def run(self): rules_dir=rules_dir ) + cdk_runner.diff() cdk_runner.synthesize() cdk_runner.bootstrap() cdk_runner.deploy() diff --git a/rdk/frameworks/cdk/cdk/cdk_stack.py b/rdk/frameworks/cdk/cdk/cdk_stack.py index 55a8a944..8798ec06 100644 --- a/rdk/frameworks/cdk/cdk/cdk_stack.py +++ b/rdk/frameworks/cdk/cdk/cdk_stack.py @@ -1,14 +1,18 @@ import json +import uuid from dataclasses import asdict from pathlib import Path +import aws_cdk as cdk from aws_cdk import Stack from aws_cdk import aws_config as config from aws_cdk import aws_lambda as lambda_ from constructs import Construct from .core.config.custom_policy import CustomPolicy +from .core.config.custom_rule import CustomRule from .core.config.managed_rule import ManagedRule +from .core.config.lambda_function import LambdaFunction from .core.config.remediation_configuration import RemediationConfiguration from .core.errors import RdkParametersInvalidError, RdkRuleTypesInvalidError @@ -31,27 +35,32 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: for rule_path in rules_list: rule_name = get_rule_name(rule_path) rule_parameters = get_rule_parameters(rule_path) - + 
print(f"Adding Rule {rule_name} ...") if "SourceRuntime" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["SourceRuntime"] in ["cloudformation-guard2.0", "guard-2.x.x"]: arg = CustomPolicy(policy_text=rule_path.joinpath("rule_code.guard").read_text(), rule_parameters=rule_parameters) config.CustomPolicy(self, rule_name, **asdict(arg)) + elif "SourceRuntime" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["SourceRuntime"] in rdk_supported_custom_rule_runtime: + # Lambda function containing logic that evaluates compliance with the rule. + fn_arg = LambdaFunction(code=lambda_.Code.from_asset(rule_path.as_posix() + # , + # bundling=cdk.BundlingOptions( + # image=lambda_.Runtime.PYTHON_3_9.bundling_image, + # command=[ + # 'bash', + # '-c', + # 'pip install -r requirements.txt -t /asset-output && cp -au . /asset-output', + # ] + # ) + ), + rule_parameters=rule_parameters) + eval_compliance_fn = lambda_.Function(self, f"{rule_name}Function", **asdict(fn_arg)) + + # A custom rule that runs on configuration changes of EC2 instances + arg = CustomRule(lambda_function=eval_compliance_fn, rule_parameters=rule_parameters) + config.CustomRule(self, rule_name, **asdict(arg)) elif "SourceIdentifier" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["SourceIdentifier"]: arg = ManagedRule(rule_parameters=rule_parameters) config.ManagedRule(self, rule_name, **asdict(arg)) - # elif rule_parameters["Parameters"]["SourceRuntime"] in rdk_supported_custom_rule_runtime: - # # Lambda function containing logic that evaluates compliance with the rule. - # eval_compliance_fn = lambda_.Function(self, "CustomFunction", - # code=lambda_.Code.asset(Path(self.node.try_get_context("rules_dir"))), - # handler="index.handler", - # runtime=lambda_.Runtime.NODEJS_14_X - # ) - - # # A custom rule that runs on configuration changes of EC2 instances - # config.CustomRule(self, "Custom", - # configuration_changes=True, - # lambda_function=eval_compliance_fn, - # rule_scope=config.RuleScope.from_resource(config.ResourceType.EC2_INSTANCE) - # ) else: print(f"Rule type not supported for Rule {rule_name}") continue @@ -59,7 +68,7 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: if "Remediation" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["Remediation"]: arg = RemediationConfiguration(rule_parameters=rule_parameters) - config.CfnRemediationConfiguration(self, "MyCfnRemediationConfiguration", **asdict(arg)) + config.CfnRemediationConfiguration(self, f"{rule_name}RemediationConfiguration", **asdict(arg)) # # A rule to detect stack drifts # drift_rule = config.CloudFormationStackDriftDetectionCheck(self, "Drift") diff --git a/rdk/frameworks/cdk/cdk/core/config/custom_policy.py b/rdk/frameworks/cdk/cdk/core/config/custom_policy.py index 731ca075..2c274e35 100644 --- a/rdk/frameworks/cdk/cdk/core/config/custom_policy.py +++ b/rdk/frameworks/cdk/cdk/core/config/custom_policy.py @@ -10,7 +10,7 @@ @dataclass class CustomPolicy: """ - Defines Custom Policy. + Defines AWS Config Custom Policy. 
Parameters: @@ -45,10 +45,9 @@ def __init__(self, policy_text: str, rule_parameters: dict): self.input_parameters = json.loads(param["InputParameters"]) if "MaximumExecutionFrequency" in param: try: - maximum_execution_frequency = getattr(config.MaximumExecutionFrequency, param["SourcePeriodic"].upper()) + self.maximum_execution_frequency = getattr(config.MaximumExecutionFrequency, param["SourcePeriodic"].upper()) except: raise RdkParametersInvalidError("Invalid parameters found in Parameters.MaximumExecutionFrequency. Please review https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-config-configrule.html#cfn-config-configrule-maximumexecutionfrequency") - self.maximum_execution_frequency = maximum_execution_frequency if "SourceEvents" in param: try: source_events = getattr(config.ResourceType, param["SourceEvents"].upper().replace("AWS::", "").replace("::", "_")) diff --git a/rdk/frameworks/cdk/cdk/core/config/custom_rule.py b/rdk/frameworks/cdk/cdk/core/config/custom_rule.py new file mode 100644 index 00000000..63ea56ba --- /dev/null +++ b/rdk/frameworks/cdk/cdk/core/config/custom_rule.py @@ -0,0 +1,64 @@ +import json +from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional + +from aws_cdk import aws_config as config +from aws_cdk import aws_lambda as _lambda + +from ..errors import RdkParametersInvalidError + + +@dataclass +class CustomRule: + """ + Defines AWS Config Custom Rule. + + Parameters: + + * **`lambda_function`** (_IFunction) – The Lambda function to run. + * **`configuration_changes`** (_Optional[bool]_): Optional - Whether to run the rule on configuration changes. Default: false + * **`periodic`** (_Optional[bool]_): Optional - Whether to run the rule on a fixed frequency. Default: false + * **`config_rule_name`** (_str_): A name for the AWS Config rule. Default: - CloudFormation generated name + * **`description`** (_str_): Optional - A description about this AWS Config rule. Default: - No description + * **`input_parameters`** (_Dict[str, Any]_): Optional - Input parameter values that are passed to the AWS Config rule. Default: - No input parameters + * **`maximum_execution_frequency`** (_MaximumExecutionFrequency_): Optional - The maximum frequency at which the AWS Config rule runs evaluations. + * **`rule_scope`** (_RuleScope_): Optional - Defines which resources trigger an evaluation for an AWS Config rule. Default: - evaluations for the rule are triggered when any resource in the recording group changes. 
+ + + + """ + + lambda_function: _lambda.IFunction = field(init=False) + configuration_changes: Optional[bool] = None + periodic: Optional[bool] = None + config_rule_name: str = field(init=False) + description: Optional[str] = None + input_parameters: Optional[Dict[str, Any]] = None + maximum_execution_frequency: Optional[config.MaximumExecutionFrequency] = None + rule_scope: Optional[config.RuleScope] = None + + def __init__(self, lambda_function: _lambda.IFunction, rule_parameters: dict): + param = rule_parameters["Parameters"] + self.lambda_function = lambda_function + if "SourcePeriodic" in param: + self.periodic = True + if "RuleName" in param: + self.config_rule_name = param["RuleName"] + if "EnableDebugLogDelivery" in param: + self.enable_debug_log = True + if "Description" in param: + self.description = param["Description"] + if "InputParameters" in param: + self.input_parameters = json.loads(param["InputParameters"]) + if "MaximumExecutionFrequency" in param: + try: + self.maximum_execution_frequency = getattr(config.MaximumExecutionFrequency, param["SourcePeriodic"].upper()) + except: + raise RdkParametersInvalidError("Invalid parameters found in Parameters.MaximumExecutionFrequency. Please review https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-config-configrule.html#cfn-config-configrule-maximumexecutionfrequency") + if "SourceEvents" in param: + try: + self.configuration_changes = True + source_events = getattr(config.ResourceType, param["SourceEvents"].upper().replace("AWS::", "").replace("::", "_")) + except: + raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceEvents. Please review https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html") + self.rule_scope = config.RuleScope.from_resources([source_events]) diff --git a/rdk/frameworks/cdk/cdk/core/config/lambda_function.py b/rdk/frameworks/cdk/cdk/core/config/lambda_function.py new file mode 100644 index 00000000..28d8b36a --- /dev/null +++ b/rdk/frameworks/cdk/cdk/core/config/lambda_function.py @@ -0,0 +1,46 @@ +import json +from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional, Sequence + +from aws_cdk import aws_lambda as lambda_ + +from ..errors import RdkParametersInvalidError + +from ..rule_parameters import ( + get_rule_name, + rdk_supported_custom_rule_runtime +) + + +@dataclass +class LambdaFunction: + """ + Defines Lambda Function. + + Parameters: + + * **`code`** (_Code_): The source code of your Lambda function. You can point to a file in an Amazon Simple Storage Service (Amazon S3) bucket or specify your source code as inline text. + * **`handler`** (_str_): The name of the method within your code that Lambda calls to execute your function. The format includes the file name. It can also include namespaces and other qualifiers, depending on the runtime. For more information, see https://docs.aws.amazon.com/lambda/latest/dg/foundation-progmodel.html. Use Handler.FROM_IMAGE when defining a function from a Docker image. NOTE: If you specify your source code as inline text by specifying the ZipFile property within the Code property, specify index.function_name as the handler. + * **`runtime`** (_Runtime_): The runtime environment for the Lambda function that you are uploading. For valid values, see the Runtime property in the AWS Lambda Developer Guide. Use Runtime.FROM_IMAGE when defining a function from a Docker image. 
+ * **`layers`** (_Optional[Sequence[ILayerVersion]]_): Optional - A list of layers to add to the function’s execution environment. You can configure your Lambda function to pull in additional code during initialization in the form of layers. Layers are packages of libraries or other dependencies that can be used by multiple functions. Default: - No layers. + + """ + + code: lambda_.Code = field(init=False) + handler: str = field(init=False) + runtime: lambda_.Runtime = field(init=False) + layers: Optional[Sequence[lambda_.ILayerVersion]] + + # TODO: add support for more lambda configuration. + + def __init__(self, code: lambda_.Code , rule_parameters: dict): + param = rule_parameters["Parameters"] + self.code = code + self.handler= f"{param['RuleName']}.lambda_handler" + if "SourceRuntime" in param: + try: + self.runtime=getattr(lambda_.Runtime, param["SourceRuntime"].replace("-lib", "").replace("3.", "_3_").upper()) + except: + raise RdkParametersInvalidError(f"Invalid parameters found in Parameters.SourceRuntime. Current supported Lambda Runtime: {rdk_supported_custom_rule_runtime}") + if "-lib" in param["SourceRuntime"]: + self.layers = [] \ No newline at end of file diff --git a/rdk/frameworks/cdk/cdk/core/config/managed_rule.py b/rdk/frameworks/cdk/cdk/core/config/managed_rule.py index 9869bed6..1b1c2084 100644 --- a/rdk/frameworks/cdk/cdk/core/config/managed_rule.py +++ b/rdk/frameworks/cdk/cdk/core/config/managed_rule.py @@ -10,7 +10,7 @@ @dataclass class ManagedRule: """ - Defines Managed Rule. + Defines AWS Config Managed Rule. Parameters: @@ -57,10 +57,9 @@ def __init__(self, rule_parameters: dict): self.input_parameters = json.loads(param["InputParameters"]) if "MaximumExecutionFrequency" in param: try: - maximum_execution_frequency = getattr(config.MaximumExecutionFrequency, param["SourcePeriodic"].upper()) + self.maximum_execution_frequency = getattr(config.MaximumExecutionFrequency, param["SourcePeriodic"].upper()) except: raise RdkParametersInvalidError("Invalid parameters found in Parameters.MaximumExecutionFrequency. Please review https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-config-configrule.html#cfn-config-configrule-maximumexecutionfrequency") - self.maximum_execution_frequency = maximum_execution_frequency if "SourceEvents" in param: try: source_events = getattr(config.ResourceType, param["SourceEvents"].upper().replace("AWS::", "").replace("::", "_").replace("EC2_VOLUME", "EBS_VOLUME")) # cdk use EBS Volume instead of EC2 Volume diff --git a/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py b/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py index 87b576a7..e3489f0a 100644 --- a/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py +++ b/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py @@ -11,7 +11,7 @@ @dataclass class RemediationConfiguration: """ - Defines Remediation Configuration. + Defines AWS Config Rule Remediation Configuration. 
Parameters: diff --git a/rdk/frameworks/cdk/cdk/core/rule_parameters.py b/rdk/frameworks/cdk/cdk/core/rule_parameters.py index 13c15208..83929e7d 100644 --- a/rdk/frameworks/cdk/cdk/core/rule_parameters.py +++ b/rdk/frameworks/cdk/cdk/core/rule_parameters.py @@ -18,8 +18,10 @@ "python3.8-lib", "python3.9", "python3.9-lib", - "nodejs6.10", - "nodejs8.10", + "python3.10", + "python3.10-lib", + # "nodejs6.10", + # "nodejs8.10", ] def get_rule_parameters(rule_dir: Path): @@ -49,7 +51,7 @@ def get_rule_name(rule_path: Path): def get_deploy_rules_list(rules_dir: Path, deployment_mode: str = "all",): deploy_rules_list = [] for path in rules_dir.absolute().glob("**/parameters.json"): - if rules_dir.absolute().joinpath("rdk").as_posix() not in path.as_posix(): + if "build/" not in path.as_posix(): if deployment_mode == "all": deploy_rules_list.append(path.parent) # Add support for java and cs diff --git a/rdk/runners/cdk.py b/rdk/runners/cdk.py index 75326dc1..1114444e 100644 --- a/rdk/runners/cdk.py +++ b/rdk/runners/cdk.py @@ -68,6 +68,29 @@ def synthesize(self): allowed_return_codes=[0, 2], ) + def diff(self): + """ + Executes `cdk diff`. + + Parameters: + + """ + cmd = [ + "cdk", + "diff", + "--context", + "rules_dir=" + self.rules_dir.as_posix() + ] + + + self.logger.info("Showing differences on CloudFormation template(s)...") + + self.run_cmd( + cmd=cmd, + cwd=self.cdk_app_dir.as_posix(), + allowed_return_codes=[0, 2], + ) + def bootstrap(self): """ Executes `cdk bootstrap`. diff --git a/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/requirements.txt b/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/requirements.txt new file mode 100644 index 00000000..23ffcd5f --- /dev/null +++ b/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/requirements.txt @@ -0,0 +1 @@ +rdklib>=0.2 \ No newline at end of file From ac473991fbf58ae563fcdcd2c4a9d353f82625ea Mon Sep 17 00:00:00 2001 From: Ricky Chau Date: Fri, 21 Apr 2023 18:22:18 -0400 Subject: [PATCH 11/23] feat(rdklib): add support to rdklib --- rdk/frameworks/cdk/cdk/cdk_stack.py | 18 ++++++------------ .../cdk/cdk/core/config/lambda_function.py | 13 ++++++++++--- 2 files changed, 16 insertions(+), 15 deletions(-) diff --git a/rdk/frameworks/cdk/cdk/cdk_stack.py b/rdk/frameworks/cdk/cdk/cdk_stack.py index 8798ec06..ba3f4eb3 100644 --- a/rdk/frameworks/cdk/cdk/cdk_stack.py +++ b/rdk/frameworks/cdk/cdk/cdk_stack.py @@ -41,19 +41,13 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: config.CustomPolicy(self, rule_name, **asdict(arg)) elif "SourceRuntime" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["SourceRuntime"] in rdk_supported_custom_rule_runtime: # Lambda function containing logic that evaluates compliance with the rule. - fn_arg = LambdaFunction(code=lambda_.Code.from_asset(rule_path.as_posix() - # , - # bundling=cdk.BundlingOptions( - # image=lambda_.Runtime.PYTHON_3_9.bundling_image, - # command=[ - # 'bash', - # '-c', - # 'pip install -r requirements.txt -t /asset-output && cp -au . 
/asset-output', - # ] - # ) - ), + fn_arg = LambdaFunction(code=lambda_.Code.from_asset(rule_path.as_posix()), rule_parameters=rule_parameters) - eval_compliance_fn = lambda_.Function(self, f"{rule_name}Function", **asdict(fn_arg)) + if "-lib" in rule_parameters["Parameters"]["SourceRuntime"]: + layer_version_arn = fn_arg.get_latest_rdklib_lambda_layer_version_arn() + latest_layer = lambda_.LayerVersion.from_layer_version_arn(scope=self, id='rdklayerversion', layer_version_arn=layer_version_arn) + # fn_arg.layers.append(latest_layer) + eval_compliance_fn = lambda_.Function(self, f"{rule_name}Function", **asdict(fn_arg), layers=[latest_layer]) # A custom rule that runs on configuration changes of EC2 instances arg = CustomRule(lambda_function=eval_compliance_fn, rule_parameters=rule_parameters) diff --git a/rdk/frameworks/cdk/cdk/core/config/lambda_function.py b/rdk/frameworks/cdk/cdk/core/config/lambda_function.py index 28d8b36a..ca5948fa 100644 --- a/rdk/frameworks/cdk/cdk/core/config/lambda_function.py +++ b/rdk/frameworks/cdk/cdk/core/config/lambda_function.py @@ -1,7 +1,9 @@ import json +import boto3 from dataclasses import dataclass, field from typing import Any, Dict, List, Optional, Sequence +from constructs import Construct from aws_cdk import aws_lambda as lambda_ from ..errors import RdkParametersInvalidError @@ -29,7 +31,7 @@ class LambdaFunction: code: lambda_.Code = field(init=False) handler: str = field(init=False) runtime: lambda_.Runtime = field(init=False) - layers: Optional[Sequence[lambda_.ILayerVersion]] + # layers: Optional[Sequence[lambda_.ILayerVersion]] # TODO: add support for more lambda configuration. @@ -37,10 +39,15 @@ def __init__(self, code: lambda_.Code , rule_parameters: dict): param = rule_parameters["Parameters"] self.code = code self.handler= f"{param['RuleName']}.lambda_handler" + # self.layers = [] if "SourceRuntime" in param: try: self.runtime=getattr(lambda_.Runtime, param["SourceRuntime"].replace("-lib", "").replace("3.", "_3_").upper()) except: raise RdkParametersInvalidError(f"Invalid parameters found in Parameters.SourceRuntime. 
Current supported Lambda Runtime: {rdk_supported_custom_rule_runtime}") - if "-lib" in param["SourceRuntime"]: - self.layers = [] \ No newline at end of file + + def get_latest_rdklib_lambda_layer_version_arn(self, layer_name: str = "rdklib-layer"): + response = boto3.client("lambda").list_layer_versions(LayerName=layer_name) + layer_versions = response["LayerVersions"] + latest_version = sorted(layer_versions, key=lambda d: d['Version'])[-1] + return latest_version["LayerVersionArn"] \ No newline at end of file From 5256a70d129093136a0619e46fdae3884256ca61 Mon Sep 17 00:00:00 2001 From: Benjamin Morris Date: Fri, 28 Apr 2023 16:14:55 -0700 Subject: [PATCH 12/23] reformat using black --- README.md | 27 ++- rdk/cli/commands/test.py | 4 +- rdk/cli/main.py | 20 +- rdk/core/errors.py | 2 +- rdk/core/rules_deploy.py | 12 +- rdk/core/rules_test.py | 62 ++++-- rdk/frameworks/cdk/app.py | 15 +- rdk/frameworks/cdk/cdk/cdk_stack.py | 61 ++++-- .../cdk/cdk/core/config/custom_policy.py | 20 +- .../cdk/cdk/core/config/custom_rule.py | 22 +- .../cdk/cdk/core/config/lambda_function.py | 37 ++-- .../cdk/cdk/core/config/managed_rule.py | 41 ++-- .../core/config/remediation_configuration.py | 40 +++- rdk/frameworks/cdk/cdk/core/errors.py | 20 +- .../cdk/cdk/core/rule_parameters.py | 80 +++++--- .../cdk/tests/unit/test_cdk_stack.py | 1 + rdk/runners/base.py | 1 - rdk/runners/cdk.py | 24 +-- rdk/runners/cfn_guard.py | 13 +- requirements.txt | 2 + .../AMI_DEPRECATED_CHECK.py | 114 ++++++----- .../AMI_DEPRECATED_CHECK_test.py | 193 ++++++++++-------- tests/unit/conftest.py | 2 +- tests/unit/core/test_errors.py | 16 +- tests/unit/runners/test_base.py | 8 +- 25 files changed, 507 insertions(+), 330 deletions(-) diff --git a/README.md b/README.md index b81187fd..db61993d 100644 --- a/README.md +++ b/README.md @@ -1,16 +1,23 @@ +# Developer Instructions -# Steps to setup your local environment -make freeze -make init +These steps are used for developers who want to make and test changes to the RDK source code. -# Editorable mode by activate pipenv -pipenv shell +## Prerequisites -# Navigagte to rules dir in integration test -cd tests/rdk-cdk-int-rules-dir +Install cfn-guard: https://docs.aws.amazon.com/cfn-guard/latest/ug/setting-up-linux.html + +## Set up your local environment +`make freeze` +`make init` + +# Activate pipenv +`pipenv shell` + +# Navigate to rules dir in integration test +`cd tests/integration/rdk-cdk-int-rules-dir` # Run RDK command for testing -rdk test -rdk deploy -rdk destroy \ No newline at end of file +`rdk test` +`rdk deploy` +`rdk destroy` diff --git a/rdk/cli/commands/test.py b/rdk/cli/commands/test.py index dfd3414c..eef3e225 100644 --- a/rdk/cli/commands/test.py +++ b/rdk/cli/commands/test.py @@ -5,11 +5,11 @@ from rdk.utils.logger import get_main_logger -def run(rulenames: List[str], verbose = False): +def run(rulenames: List[str], verbose=False): """ test sub-command handler. """ logger = get_main_logger() logger.info("RDK is starting ...") - sys.exit(RulesTest(rulenames=rulenames, verbose = verbose).run()) + sys.exit(RulesTest(rulenames=rulenames, verbose=verbose).run()) diff --git a/rdk/cli/main.py b/rdk/cli/main.py index 3e3c209b..f5a925e3 100644 --- a/rdk/cli/main.py +++ b/rdk/cli/main.py @@ -68,11 +68,11 @@ def main(): ) commands_parser_deploy.add_argument( - "rulename", + "rulename", metavar="", - nargs="*", + nargs="*", default="", - help="Rule name(s) to deploy. Rule(s) will be pushed to AWS." + help="Rule name(s) to deploy. 
Rule(s) will be pushed to AWS.", ) commands_parser_deploy.add_argument( @@ -90,11 +90,11 @@ def main(): ) commands_parser_test.add_argument( - "rulename", + "rulename", metavar="", - nargs="*", + nargs="*", default="", - help="Rule name(s) to test. Unit test of the rule(s) will be executed." + help="Rule name(s) to test. Unit test of the rule(s) will be executed.", ) commands_parser_test.add_argument( @@ -112,11 +112,11 @@ def main(): ) commands_parser_destroy.add_argument( - "rulename", + "rulename", metavar="", - nargs="*", + nargs="*", default="", - help="Rule name(s) to destroy. Rule(s) will be removed." + help="Rule name(s) to destroy. Rule(s) will be removed.", ) commands_parser_destroy.add_argument( @@ -171,4 +171,4 @@ def main(): destroy_cmd.run( rulenames=args.rulename, dryrun=args.dryrun, - ) \ No newline at end of file + ) diff --git a/rdk/core/errors.py b/rdk/core/errors.py index 468d0ab5..56582564 100644 --- a/rdk/core/errors.py +++ b/rdk/core/errors.py @@ -24,4 +24,4 @@ class RdkCommandExecutionError(RdkError): class RdkCommandNotAllowedError(RdkError): """ An unsupported command was requested to be executed. - """ \ No newline at end of file + """ diff --git a/rdk/core/rules_deploy.py b/rdk/core/rules_deploy.py index ee4e20fa..b2f748eb 100644 --- a/rdk/core/rules_deploy.py +++ b/rdk/core/rules_deploy.py @@ -34,11 +34,9 @@ def run(self): if len(self.rulenames) > 0: rules_dir = Path(self.rulenames[0]) else: - rules_dir=Path().absolute() + rules_dir = Path().absolute() - cdk_runner = CdkRunner( - rules_dir=rules_dir - ) + cdk_runner = CdkRunner(rules_dir=rules_dir) cdk_runner.diff() cdk_runner.synthesize() @@ -52,9 +50,7 @@ def destroy(self): if len(self.rulenames) > 0: rules_dir = Path(self.rulenames[0]) else: - rules_dir=Path().absolute() + rules_dir = Path().absolute() - cdk_runner = CdkRunner( - rules_dir=rules_dir - ) + cdk_runner = CdkRunner(rules_dir=rules_dir) cdk_runner.destroy() diff --git a/rdk/core/rules_test.py b/rdk/core/rules_test.py index 01e1a61b..97374a22 100644 --- a/rdk/core/rules_test.py +++ b/rdk/core/rules_test.py @@ -39,10 +39,7 @@ def run(self): self.logger.info("Running local test!") tests_successful = True rules_list = [] - test_report = { - "pytest_results": [], - "cfn_guard_results": [] - } + test_report = {"pytest_results": [], "cfn_guard_results": []} cwd = Path().absolute() # Construct our list of rules to test. @@ -72,19 +69,27 @@ def run(self): elif runtime in ["cloudformation-guard2.0", "guard-2.x.x"]: test_report["cfn_guard_results"] += self._run_cfn_guard_test(test_dir) else: - self.logger.info(f"Skipping {rule_name} - The Custom Rule Runtime provided or Managed Rule is not supported for unit testing.") + self.logger.info( + f"Skipping {rule_name} - The Custom Rule Runtime provided or Managed Rule is not supported for unit testing." 
+ ) exit(self._result_summary(test_report)) def _run_pytest(self, test_dir: Path): loader = unittest.TestLoader() - suite = loader.discover(test_dir, pattern = "*_test.py") + suite = loader.discover(test_dir, pattern="*_test.py") results = unittest.TextTestRunner(buffer=self.verbose, verbosity=2).run(suite) if len(results.errors) == 0 and len(results.failures) == 0: - status = "PASSED" + status = "PASSED" else: status = "FAILED" - return { "rule_dir": test_dir.name, "status": status,"test_run": results.testsRun, "errors": results.errors, "failures": results.failures } + return { + "rule_dir": test_dir.name, + "status": status, + "test_run": results.testsRun, + "errors": results.errors, + "failures": results.failures, + } def _run_cfn_guard_test(self, test_dir: Path): report = [] @@ -92,15 +97,38 @@ def _run_cfn_guard_test(self, test_dir: Path): for test_path in test_dir.glob("**/*"): self.logger.info(f"Running test {test_path}") self.logger.info(f"Test file: {test_path.suffix} {test_path.name}") - if test_path.suffix in [".json", ".yaml", ".yml"] and test_path.name != "parameters.json": - cfn_guard_runner = CfnGuardRunner(rules_file=test_dir.joinpath("rule_code.guard"), test_data=test_path, verbose=self.verbose) + if ( + test_path.suffix in [".json", ".yaml", ".yml"] + and test_path.name != "parameters.json" + ): + cfn_guard_runner = CfnGuardRunner( + rules_file=test_dir.joinpath("rule_code.guard"), + test_data=test_path, + verbose=self.verbose, + ) try: results = cfn_guard_runner.test() - report.append({"rule_dir": f"{test_dir.name}/{test_path.name}", "status": "PASSED", "test_run": results.count("Test Case #"), "errors": [], "failures": []}) + report.append( + { + "rule_dir": f"{test_dir.name}/{test_path.name}", + "status": "PASSED", + "test_run": results.count("Test Case #"), + "errors": [], + "failures": [], + } + ) except Exception as e: - report.append({"rule_dir": f"{test_dir.name}/{test_path.name}", "status": "FAILED", "test_run": results.count("Test Case #"), "errors": [e], "failures": [results]}) + report.append( + { + "rule_dir": f"{test_dir.name}/{test_path.name}", + "status": "FAILED", + "test_run": results.count("Test Case #"), + "errors": [e], + "failures": [results], + } + ) return report - + def _result_summary(self, test_report: Dict[str, Any]): pytest_results = test_report["pytest_results"] cfn_guard_results = test_report["cfn_guard_results"] @@ -117,11 +145,13 @@ def _result_summary(self, test_report: Dict[str, Any]): exit_code = self._show_result(cfn_guard_results) and exit_code return exit_code - def _show_result(self, report_results: Dict[str, Any]): + def _show_result(self, report_results: Dict[str, Any]): exit_code = 0 for result in report_results: self.logger.info(f"{result['rule_dir']} - ") - self.logger.info(f"\tStatus: {result['status']} tests_run:{result['test_run']}") + self.logger.info( + f"\tStatus: {result['status']} tests_run:{result['test_run']}" + ) if result["errors"]: exit_code = 1 for error in result["errors"]: @@ -130,5 +160,5 @@ def _show_result(self, report_results: Dict[str, Any]): exit_code = 2 for failure in result["failures"]: if failure != "": - self.logger.info(f"\tTest failures found: {failure}") + self.logger.info(f"\tTest failures found: {failure}") return exit_code diff --git a/rdk/frameworks/cdk/app.py b/rdk/frameworks/cdk/app.py index 441d0761..2859800d 100644 --- a/rdk/frameworks/cdk/app.py +++ b/rdk/frameworks/cdk/app.py @@ -5,22 +5,19 @@ from cdk.cdk_stack import CdkStack app = cdk.App() -CdkStack(app, "CdkStack", +CdkStack( + app, 
+ "CdkStack", # If you don't specify 'env', this stack will be environment-agnostic. # Account/Region-dependent features and context lookups will not work, # but a single synthesized template can be deployed anywhere. - # Uncomment the next line to specialize this stack for the AWS Account # and Region that are implied by the current CLI configuration. - - #env=cdk.Environment(account=os.getenv('CDK_DEFAULT_ACCOUNT'), region=os.getenv('CDK_DEFAULT_REGION')), - + # env=cdk.Environment(account=os.getenv('CDK_DEFAULT_ACCOUNT'), region=os.getenv('CDK_DEFAULT_REGION')), # Uncomment the next line if you know exactly what Account and Region you # want to deploy the stack to. */ - - #env=cdk.Environment(account='123456789012', region='us-east-1'), - + # env=cdk.Environment(account='123456789012', region='us-east-1'), # For more information, see https://docs.aws.amazon.com/cdk/latest/guide/environments.html - ) +) app.synth() diff --git a/rdk/frameworks/cdk/cdk/cdk_stack.py b/rdk/frameworks/cdk/cdk/cdk_stack.py index ba3f4eb3..6a41ecdc 100644 --- a/rdk/frameworks/cdk/cdk/cdk_stack.py +++ b/rdk/frameworks/cdk/cdk/cdk_stack.py @@ -25,7 +25,6 @@ class CdkStack(Stack): - def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: super().__init__(scope, construct_id, **kwargs) @@ -36,33 +35,65 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: rule_name = get_rule_name(rule_path) rule_parameters = get_rule_parameters(rule_path) print(f"Adding Rule {rule_name} ...") - if "SourceRuntime" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["SourceRuntime"] in ["cloudformation-guard2.0", "guard-2.x.x"]: - arg = CustomPolicy(policy_text=rule_path.joinpath("rule_code.guard").read_text(), rule_parameters=rule_parameters) + if "SourceRuntime" in rule_parameters["Parameters"] and rule_parameters[ + "Parameters" + ]["SourceRuntime"] in ["cloudformation-guard2.0", "guard-2.x.x"]: + arg = CustomPolicy( + policy_text=rule_path.joinpath("rule_code.guard").read_text(), + rule_parameters=rule_parameters, + ) config.CustomPolicy(self, rule_name, **asdict(arg)) - elif "SourceRuntime" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["SourceRuntime"] in rdk_supported_custom_rule_runtime: + elif ( + "SourceRuntime" in rule_parameters["Parameters"] + and rule_parameters["Parameters"]["SourceRuntime"] + in rdk_supported_custom_rule_runtime + ): # Lambda function containing logic that evaluates compliance with the rule. 
- fn_arg = LambdaFunction(code=lambda_.Code.from_asset(rule_path.as_posix()), - rule_parameters=rule_parameters) + fn_arg = LambdaFunction( + code=lambda_.Code.from_asset(rule_path.as_posix()), + rule_parameters=rule_parameters, + ) if "-lib" in rule_parameters["Parameters"]["SourceRuntime"]: - layer_version_arn = fn_arg.get_latest_rdklib_lambda_layer_version_arn() - latest_layer = lambda_.LayerVersion.from_layer_version_arn(scope=self, id='rdklayerversion', layer_version_arn=layer_version_arn) + layer_version_arn = ( + fn_arg.get_latest_rdklib_lambda_layer_version_arn() + ) + latest_layer = lambda_.LayerVersion.from_layer_version_arn( + scope=self, + id="rdklayerversion", + layer_version_arn=layer_version_arn, + ) # fn_arg.layers.append(latest_layer) - eval_compliance_fn = lambda_.Function(self, f"{rule_name}Function", **asdict(fn_arg), layers=[latest_layer]) + eval_compliance_fn = lambda_.Function( + self, + f"{rule_name}Function", + **asdict(fn_arg), + layers=[latest_layer], + ) # A custom rule that runs on configuration changes of EC2 instances - arg = CustomRule(lambda_function=eval_compliance_fn, rule_parameters=rule_parameters) + arg = CustomRule( + lambda_function=eval_compliance_fn, rule_parameters=rule_parameters + ) config.CustomRule(self, rule_name, **asdict(arg)) - elif "SourceIdentifier" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["SourceIdentifier"]: + elif ( + "SourceIdentifier" in rule_parameters["Parameters"] + and rule_parameters["Parameters"]["SourceIdentifier"] + ): arg = ManagedRule(rule_parameters=rule_parameters) config.ManagedRule(self, rule_name, **asdict(arg)) else: print(f"Rule type not supported for Rule {rule_name}") continue # raise RdkRuleTypesInvalidError(f"Error loading parameters file for Rule {rule_name}") - - if "Remediation" in rule_parameters["Parameters"] and rule_parameters["Parameters"]["Remediation"]: + + if ( + "Remediation" in rule_parameters["Parameters"] + and rule_parameters["Parameters"]["Remediation"] + ): arg = RemediationConfiguration(rule_parameters=rule_parameters) - config.CfnRemediationConfiguration(self, f"{rule_name}RemediationConfiguration", **asdict(arg)) + config.CfnRemediationConfiguration( + self, f"{rule_name}RemediationConfiguration", **asdict(arg) + ) # # A rule to detect stack drifts # drift_rule = config.CloudFormationStackDriftDetectionCheck(self, "Drift") @@ -72,4 +103,4 @@ def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: # # Send notification on compliance change events # drift_rule.on_compliance_change("ComplianceChange", # target=targets.SnsTopic(compliance_topic) - # ) \ No newline at end of file + # ) diff --git a/rdk/frameworks/cdk/cdk/core/config/custom_policy.py b/rdk/frameworks/cdk/cdk/core/config/custom_policy.py index 2c274e35..7ee02a9c 100644 --- a/rdk/frameworks/cdk/cdk/core/config/custom_policy.py +++ b/rdk/frameworks/cdk/cdk/core/config/custom_policy.py @@ -45,12 +45,24 @@ def __init__(self, policy_text: str, rule_parameters: dict): self.input_parameters = json.loads(param["InputParameters"]) if "MaximumExecutionFrequency" in param: try: - self.maximum_execution_frequency = getattr(config.MaximumExecutionFrequency, param["SourcePeriodic"].upper()) + self.maximum_execution_frequency = getattr( + config.MaximumExecutionFrequency, param["SourcePeriodic"].upper() + ) except: - raise RdkParametersInvalidError("Invalid parameters found in Parameters.MaximumExecutionFrequency. 
Please review https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-config-configrule.html#cfn-config-configrule-maximumexecutionfrequency") + raise RdkParametersInvalidError( + "Invalid parameters found in Parameters.MaximumExecutionFrequency. Please review https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-config-configrule.html#cfn-config-configrule-maximumexecutionfrequency" + ) if "SourceEvents" in param: try: - source_events = getattr(config.ResourceType, param["SourceEvents"].upper().replace("AWS::", "").replace("::", "_")) + source_events = getattr( + config.ResourceType, + param["SourceEvents"] + .upper() + .replace("AWS::", "") + .replace("::", "_"), + ) except: - raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceEvents. Please review https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html") + raise RdkParametersInvalidError( + "Invalid parameters found in Parameters.SourceEvents. Please review https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html" + ) self.rule_scope = config.RuleScope.from_resources([source_events]) diff --git a/rdk/frameworks/cdk/cdk/core/config/custom_rule.py b/rdk/frameworks/cdk/cdk/core/config/custom_rule.py index 63ea56ba..af2310be 100644 --- a/rdk/frameworks/cdk/cdk/core/config/custom_rule.py +++ b/rdk/frameworks/cdk/cdk/core/config/custom_rule.py @@ -24,7 +24,7 @@ class CustomRule: * **`maximum_execution_frequency`** (_MaximumExecutionFrequency_): Optional - The maximum frequency at which the AWS Config rule runs evaluations. * **`rule_scope`** (_RuleScope_): Optional - Defines which resources trigger an evaluation for an AWS Config rule. Default: - evaluations for the rule are triggered when any resource in the recording group changes. - + """ @@ -52,13 +52,25 @@ def __init__(self, lambda_function: _lambda.IFunction, rule_parameters: dict): self.input_parameters = json.loads(param["InputParameters"]) if "MaximumExecutionFrequency" in param: try: - self.maximum_execution_frequency = getattr(config.MaximumExecutionFrequency, param["SourcePeriodic"].upper()) + self.maximum_execution_frequency = getattr( + config.MaximumExecutionFrequency, param["SourcePeriodic"].upper() + ) except: - raise RdkParametersInvalidError("Invalid parameters found in Parameters.MaximumExecutionFrequency. Please review https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-config-configrule.html#cfn-config-configrule-maximumexecutionfrequency") + raise RdkParametersInvalidError( + "Invalid parameters found in Parameters.MaximumExecutionFrequency. Please review https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-config-configrule.html#cfn-config-configrule-maximumexecutionfrequency" + ) if "SourceEvents" in param: try: self.configuration_changes = True - source_events = getattr(config.ResourceType, param["SourceEvents"].upper().replace("AWS::", "").replace("::", "_")) + source_events = getattr( + config.ResourceType, + param["SourceEvents"] + .upper() + .replace("AWS::", "") + .replace("::", "_"), + ) except: - raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceEvents. Please review https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html") + raise RdkParametersInvalidError( + "Invalid parameters found in Parameters.SourceEvents. 
Please review https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html" + ) self.rule_scope = config.RuleScope.from_resources([source_events]) diff --git a/rdk/frameworks/cdk/cdk/core/config/lambda_function.py b/rdk/frameworks/cdk/cdk/core/config/lambda_function.py index ca5948fa..62d00986 100644 --- a/rdk/frameworks/cdk/cdk/core/config/lambda_function.py +++ b/rdk/frameworks/cdk/cdk/core/config/lambda_function.py @@ -8,10 +8,7 @@ from ..errors import RdkParametersInvalidError -from ..rule_parameters import ( - get_rule_name, - rdk_supported_custom_rule_runtime -) +from ..rule_parameters import get_rule_name, rdk_supported_custom_rule_runtime @dataclass @@ -25,29 +22,39 @@ class LambdaFunction: * **`handler`** (_str_): The name of the method within your code that Lambda calls to execute your function. The format includes the file name. It can also include namespaces and other qualifiers, depending on the runtime. For more information, see https://docs.aws.amazon.com/lambda/latest/dg/foundation-progmodel.html. Use Handler.FROM_IMAGE when defining a function from a Docker image. NOTE: If you specify your source code as inline text by specifying the ZipFile property within the Code property, specify index.function_name as the handler. * **`runtime`** (_Runtime_): The runtime environment for the Lambda function that you are uploading. For valid values, see the Runtime property in the AWS Lambda Developer Guide. Use Runtime.FROM_IMAGE when defining a function from a Docker image. * **`layers`** (_Optional[Sequence[ILayerVersion]]_): Optional - A list of layers to add to the function’s execution environment. You can configure your Lambda function to pull in additional code during initialization in the form of layers. Layers are packages of libraries or other dependencies that can be used by multiple functions. Default: - No layers. - + """ code: lambda_.Code = field(init=False) handler: str = field(init=False) runtime: lambda_.Runtime = field(init=False) # layers: Optional[Sequence[lambda_.ILayerVersion]] - - # TODO: add support for more lambda configuration. - def __init__(self, code: lambda_.Code , rule_parameters: dict): + # TODO: add support for more lambda configuration. + + def __init__(self, code: lambda_.Code, rule_parameters: dict): param = rule_parameters["Parameters"] self.code = code - self.handler= f"{param['RuleName']}.lambda_handler" + self.handler = f"{param['RuleName']}.lambda_handler" # self.layers = [] if "SourceRuntime" in param: try: - self.runtime=getattr(lambda_.Runtime, param["SourceRuntime"].replace("-lib", "").replace("3.", "_3_").upper()) + self.runtime = getattr( + lambda_.Runtime, + param["SourceRuntime"] + .replace("-lib", "") + .replace("3.", "_3_") + .upper(), + ) except: - raise RdkParametersInvalidError(f"Invalid parameters found in Parameters.SourceRuntime. Current supported Lambda Runtime: {rdk_supported_custom_rule_runtime}") - - def get_latest_rdklib_lambda_layer_version_arn(self, layer_name: str = "rdklib-layer"): + raise RdkParametersInvalidError( + f"Invalid parameters found in Parameters.SourceRuntime. 
Current supported Lambda Runtime: {rdk_supported_custom_rule_runtime}" + ) + + def get_latest_rdklib_lambda_layer_version_arn( + self, layer_name: str = "rdklib-layer" + ): response = boto3.client("lambda").list_layer_versions(LayerName=layer_name) layer_versions = response["LayerVersions"] - latest_version = sorted(layer_versions, key=lambda d: d['Version'])[-1] - return latest_version["LayerVersionArn"] \ No newline at end of file + latest_version = sorted(layer_versions, key=lambda d: d["Version"])[-1] + return latest_version["LayerVersionArn"] diff --git a/rdk/frameworks/cdk/cdk/core/config/managed_rule.py b/rdk/frameworks/cdk/cdk/core/config/managed_rule.py index 1b1c2084..fc0e1782 100644 --- a/rdk/frameworks/cdk/cdk/core/config/managed_rule.py +++ b/rdk/frameworks/cdk/cdk/core/config/managed_rule.py @@ -41,28 +41,43 @@ def __init__(self, rule_parameters: dict): except: if identifier in [ # exception list for unmatching identifiers https://docs.aws.amazon.com/cdk/api/v2/python/aws_cdk.aws_config/ManagedRuleIdentifiers.html - "MULTI_REGION_CLOUD_TRAIL_ENABLED", - "ENCRYPTED_VOLUMES", - "DESIRED_INSTANCE_TENANCY", - "DESIRED_INSTANCE_TYPE", - "INSTANCES_IN_VPC", - "INCOMING_SSH_DISABLED" - ]: + "MULTI_REGION_CLOUD_TRAIL_ENABLED", + "ENCRYPTED_VOLUMES", + "DESIRED_INSTANCE_TENANCY", + "DESIRED_INSTANCE_TYPE", + "INSTANCES_IN_VPC", + "INCOMING_SSH_DISABLED", + ]: self.identifier = identifier else: - raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceIdentifier. Please review https://docs.aws.amazon.com/config/latest/developerguide/managed-rules-by-aws-config.html") + raise RdkParametersInvalidError( + "Invalid parameters found in Parameters.SourceIdentifier. Please review https://docs.aws.amazon.com/config/latest/developerguide/managed-rules-by-aws-config.html" + ) if "Description" in param: self.description = param["Description"] if "InputParameters" in param: self.input_parameters = json.loads(param["InputParameters"]) if "MaximumExecutionFrequency" in param: try: - self.maximum_execution_frequency = getattr(config.MaximumExecutionFrequency, param["SourcePeriodic"].upper()) + self.maximum_execution_frequency = getattr( + config.MaximumExecutionFrequency, param["SourcePeriodic"].upper() + ) except: - raise RdkParametersInvalidError("Invalid parameters found in Parameters.MaximumExecutionFrequency. Please review https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-config-configrule.html#cfn-config-configrule-maximumexecutionfrequency") + raise RdkParametersInvalidError( + "Invalid parameters found in Parameters.MaximumExecutionFrequency. Please review https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-config-configrule.html#cfn-config-configrule-maximumexecutionfrequency" + ) if "SourceEvents" in param: try: - source_events = getattr(config.ResourceType, param["SourceEvents"].upper().replace("AWS::", "").replace("::", "_").replace("EC2_VOLUME", "EBS_VOLUME")) # cdk use EBS Volume instead of EC2 Volume + source_events = getattr( + config.ResourceType, + param["SourceEvents"] + .upper() + .replace("AWS::", "") + .replace("::", "_") + .replace("EC2_VOLUME", "EBS_VOLUME"), + ) # cdk use EBS Volume instead of EC2 Volume except: - raise RdkParametersInvalidError("Invalid parameters found in Parameters.SourceEvents. 
Please review https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html") - self.rule_scope = config.RuleScope.from_resources([source_events]) \ No newline at end of file + raise RdkParametersInvalidError( + "Invalid parameters found in Parameters.SourceEvents. Please review https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html" + ) + self.rule_scope = config.RuleScope.from_resources([source_events]) diff --git a/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py b/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py index e3489f0a..17248b18 100644 --- a/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py +++ b/rdk/frameworks/cdk/cdk/core/config/remediation_configuration.py @@ -25,24 +25,31 @@ class RemediationConfiguration: * **`resource_type`** (_Optional[str]_) : Optional - The type of a resource. * **`retry_attempt_seconds`** (_Union[int, float, None]_) : Optional - Maximum time in seconds that AWS Config runs auto-remediation. If you do not select a number, the default is 60 seconds. For example, if you specify RetryAttemptSeconds as 50 seconds and MaximumAutomaticAttempts as 5, AWS Config will run auto-remediations 5 times within 50 seconds before throwing an exception. * **`target_version`** (_Optional[str]_) : Optional - Version of the target. For example, version of the SSM document. .. epigraph:: If you make backward incompatible changes to the SSM document, you must call PutRemediationConfiguration API again to ensure the remediations can run. - + """ + config_rule_name: str = field(init=False) target_id: str = field(init=False) target_type: str = field(init=False) automatic: Optional[Union[bool, IResolvable, None]] = None - execution_controls: Optional[Union[IResolvable, config.CfnRemediationConfiguration.ExecutionControlsProperty, Dict[str, Any], None]] = None + execution_controls: Optional[ + Union[ + IResolvable, + config.CfnRemediationConfiguration.ExecutionControlsProperty, + Dict[str, Any], + None, + ] + ] = None maximum_automatic_attempts: Optional[Union[int, float, None]] = None parameters: Optional[Any] = None resource_type: Optional[str] = None retry_attempt_seconds: Union[int, float, None] = None target_version: Optional[str] = None - def __init__(self, rule_parameters: dict): param = rule_parameters["Parameters"] - reme_param = param['Remediation'] + reme_param = param["Remediation"] self.target_id = reme_param["TargetId"] self.target_type = reme_param["TargetType"] if "RuleName" in rule_parameters["Parameters"]: @@ -51,17 +58,30 @@ def __init__(self, rule_parameters: dict): self.automatic = reme_param["Automatic"] if "ExecutionControls" in reme_param: if "SsmControls" in reme_param["ExecutionControls"]: - if "ConcurrentExecutionRatePercentage" in reme_param["ExecutionControls"]["SsmControls"]: - concurrent_execution_rate_percentage = reme_param["ExecutionControls"]["SsmControls"]["ConcurrentExecutionRatePercentage"] + if ( + "ConcurrentExecutionRatePercentage" + in reme_param["ExecutionControls"]["SsmControls"] + ): + concurrent_execution_rate_percentage = reme_param[ + "ExecutionControls" + ]["SsmControls"]["ConcurrentExecutionRatePercentage"] if "ErrorPercentage" in reme_param["ExecutionControls"]["SsmControls"]: - error_percentage = reme_param["ExecutionControls"]["SsmControls"]["ErrorPercentage"] + error_percentage = reme_param["ExecutionControls"]["SsmControls"][ + "ErrorPercentage" + ] ssm_controls = config.CfnRemediationConfiguration.SsmControlsProperty( 
concurrent_execution_rate_percentage=concurrent_execution_rate_percentage, - error_percentage=error_percentage + error_percentage=error_percentage, + ) + self.execution_controls = ( + config.CfnRemediationConfiguration.ExecutionControlsProperty( + ssm_controls ) - self.execution_controls = config.CfnRemediationConfiguration.ExecutionControlsProperty(ssm_controls) + ) if "MaximumAutomaticAttempts" in reme_param: - self.maximum_automatic_attempts = int(reme_param["MaximumAutomaticAttempts"]) + self.maximum_automatic_attempts = int( + reme_param["MaximumAutomaticAttempts"] + ) if "Parameters" in reme_param: self.parameters = reme_param["Parameters"] if "ResourceType" in reme_param: diff --git a/rdk/frameworks/cdk/cdk/core/errors.py b/rdk/frameworks/cdk/cdk/core/errors.py index 7d8f1b30..b5239bb6 100644 --- a/rdk/frameworks/cdk/cdk/core/errors.py +++ b/rdk/frameworks/cdk/cdk/core/errors.py @@ -9,45 +9,47 @@ class RdkParametersInvalidError(Exception): from parameters.json. """ + class RdkJsonInvalidError(Exception): """ Raise invalid json error when rdk failed to decode parameters.json. """ + def __init__(self, rule_dir): - message = ( - f"Failed to decode JSON in parameters file in {rule_dir}" - ) + message = f"Failed to decode JSON in parameters file in {rule_dir}" super().__init__(message) + class RdkJsonLoadFailure(Exception): """ Raise load failure exception when rdk failed to load parameters.json. """ + def __init__(self, rule_dir): - message = ( - f"Error loading parameters file in {rule_dir}" - ) + message = f"Error loading parameters file in {rule_dir}" super().__init__(message) + class RdkRuleTypesInvalidError(Exception): """ Raise invalid source type error for non supporting types. """ + class RdkNotSupportedError(Exception): """ Raise not supporting error for not supported action. """ + class RdkDuplicatedRuleNameError(Exception): """ Raise invalid source type error for non supporting types. 
""" + def __init__(self, rule_paths): - message = ( - f"Found duplicated rule name in the following paths: {rule_paths}" - ) + message = f"Found duplicated rule name in the following paths: {rule_paths}" super().__init__(message) diff --git a/rdk/frameworks/cdk/cdk/core/rule_parameters.py b/rdk/frameworks/cdk/cdk/core/rule_parameters.py index 83929e7d..faafd148 100644 --- a/rdk/frameworks/cdk/cdk/core/rule_parameters.py +++ b/rdk/frameworks/cdk/cdk/core/rule_parameters.py @@ -12,17 +12,18 @@ ) rdk_supported_custom_rule_runtime = [ - "python3.7", - "python3.7-lib", - "python3.8", - "python3.8-lib", - "python3.9", - "python3.9-lib", - "python3.10", - "python3.10-lib", - # "nodejs6.10", - # "nodejs8.10", - ] + "python3.7", + "python3.7-lib", + "python3.8", + "python3.8-lib", + "python3.9", + "python3.9-lib", + "python3.10", + "python3.10-lib", + # "nodejs6.10", + # "nodejs8.10", +] + def get_rule_parameters(rule_dir: Path): parameters_txt = rule_dir.joinpath("parameters.json").read_text() @@ -37,45 +38,72 @@ def get_rule_parameters(rule_dir: Path): return validate(rule_dir, parameters_json) + def get_rule_name(rule_path: Path): rule_parameters = get_rule_parameters(rule_path) try: rule_name = rule_parameters["Parameters"]["RuleName"] except Exception as e: - raise RdkParametersInvalidError(f"Invalid parameters found in Parameters.RuleName in {rule_path}") + raise RdkParametersInvalidError( + f"Invalid parameters found in Parameters.RuleName in {rule_path}" + ) if len(rule_name) > 128: - raise RdkParametersInvalidError("Error: Found Rule with name over 128 characters: {rule_name} \n Recreate the Rule with a shorter name.") + raise RdkParametersInvalidError( + "Error: Found Rule with name over 128 characters: {rule_name} \n Recreate the Rule with a shorter name." + ) return rule_name -def get_deploy_rules_list(rules_dir: Path, deployment_mode: str = "all",): + +def get_deploy_rules_list( + rules_dir: Path, + deployment_mode: str = "all", +): deploy_rules_list = [] for path in rules_dir.absolute().glob("**/parameters.json"): if "build/" not in path.as_posix(): if deployment_mode == "all": deploy_rules_list.append(path.parent) - # Add support for java and cs + # Add support for java and cs # elif deployment_mode == "rule_names": # for path in rules_dir.absolute().glob("**/parameters.json"): # if rules_dir.absolute().joinpath("rdk").as_posix() not in path.as_posix(): # if command_arg == get_rule_name(path.parent): # rule_dir_paths.append(path.parent.as_posix()) - # if len(rule_dir_paths) > 1: + # if len(rule_dir_paths) > 1: # raise RdkDuplicatedRuleNameError(rule_dir_paths) else: - raise RdkNotSupportedError('Invalid Option: Specify Rule Name or RuleSet or empty for all.') - + raise RdkNotSupportedError( + "Invalid Option: Specify Rule Name or RuleSet or empty for all." + ) + return deploy_rules_list + def validate(rule_dir: Path, parameters_json: dict): - #TODO + # TODO latest_schema_version = "1.0" if "Parameters" not in parameters_json: raise RdkParametersInvalidError(f"Error in {rule_dir}: Missing Parameters Key") - if "Version" not in parameters_json and parameters_json["Version"] != latest_schema_version: - raise RdkParametersInvalidError(f"Error in {rule_dir}: Missing Version Key. 
The latest supported schema version is {latest_schema_version}") - if "SourceIdentifier" not in parameters_json["Parameters"] and "SourceRuntime" not in parameters_json["Parameters"]: - raise RdkParametersInvalidError(f"Error in {rule_dir}: Missing Parameters.SourceIdentifier or Parameters.SourceRuntime is required") - if "SourcePeriodic" not in parameters_json["Parameters"] and "SourceEvents" not in parameters_json["Parameters"]: - raise RdkParametersInvalidError(f"Error in {rule_dir}: Missing Parameters.SourcePeriodic or Parameters.SourceEvents is required") - return parameters_json \ No newline at end of file + if ( + "Version" not in parameters_json + and parameters_json["Version"] != latest_schema_version + ): + raise RdkParametersInvalidError( + f"Error in {rule_dir}: Missing Version Key. The latest supported schema version is {latest_schema_version}" + ) + if ( + "SourceIdentifier" not in parameters_json["Parameters"] + and "SourceRuntime" not in parameters_json["Parameters"] + ): + raise RdkParametersInvalidError( + f"Error in {rule_dir}: Missing Parameters.SourceIdentifier or Parameters.SourceRuntime is required" + ) + if ( + "SourcePeriodic" not in parameters_json["Parameters"] + and "SourceEvents" not in parameters_json["Parameters"] + ): + raise RdkParametersInvalidError( + f"Error in {rule_dir}: Missing Parameters.SourcePeriodic or Parameters.SourceEvents is required" + ) + return parameters_json diff --git a/rdk/frameworks/cdk/tests/unit/test_cdk_stack.py b/rdk/frameworks/cdk/tests/unit/test_cdk_stack.py index b5ad1c04..54b590fe 100644 --- a/rdk/frameworks/cdk/tests/unit/test_cdk_stack.py +++ b/rdk/frameworks/cdk/tests/unit/test_cdk_stack.py @@ -10,6 +10,7 @@ def test_sqs_queue_created(): stack = CdkStack(app, "cdk") template = assertions.Template.from_stack(stack) + # template.has_resource_properties("AWS::SQS::Queue", { # "VisibilityTimeout": 300 # }) diff --git a/rdk/runners/base.py b/rdk/runners/base.py index 857a4a5f..06537063 100644 --- a/rdk/runners/base.py +++ b/rdk/runners/base.py @@ -39,7 +39,6 @@ def __post_init__(self): # self.logger = get_testcase_logger() self.logger = get_main_logger() - # Linter notes: # * Yes pylint, we know this method is complicated. # * bandit does not like subprocess. 
See note at the top of this file diff --git a/rdk/runners/cdk.py b/rdk/runners/cdk.py index 1114444e..1e15ef29 100644 --- a/rdk/runners/cdk.py +++ b/rdk/runners/cdk.py @@ -34,7 +34,7 @@ def __post_init__(self): # shutil.rmtree(self.root_module / "cdk") # shutil.copytree(Path(__file__).resolve().parent.parent /'frameworks' / 'cdk', self.root_module / 'cdk') # self.cdk_app_dir = self.root_module / "cdk" - self.cdk_app_dir = Path(__file__).resolve().parent.parent /'frameworks' / 'cdk' + self.cdk_app_dir = Path(__file__).resolve().parent.parent / "frameworks" / "cdk" def synthesize(self): """ @@ -56,10 +56,9 @@ def synthesize(self): # "--asset-metadata", # "false", "--context", - "rules_dir=" + self.rules_dir.as_posix() + "rules_dir=" + self.rules_dir.as_posix(), ] - self.logger.info("Synthesizing CloudFormation template(s)...") self.run_cmd( @@ -75,13 +74,7 @@ def diff(self): Parameters: """ - cmd = [ - "cdk", - "diff", - "--context", - "rules_dir=" + self.rules_dir.as_posix() - ] - + cmd = ["cdk", "diff", "--context", "rules_dir=" + self.rules_dir.as_posix()] self.logger.info("Showing differences on CloudFormation template(s)...") @@ -102,10 +95,9 @@ def bootstrap(self): "cdk", "bootstrap", "--context", - "rules_dir=" + self.rules_dir.as_posix() + "rules_dir=" + self.rules_dir.as_posix(), ] - self.logger.info("Envrionment Bootstrapping ...") self.run_cmd( @@ -127,10 +119,9 @@ def deploy(self): "--context", "rules_dir=" + self.rules_dir.as_posix(), "--require-approval", - "never" + "never", ] - self.logger.info("Deploying AWS Config Rules ...") self.run_cmd( @@ -151,14 +142,13 @@ def destroy(self): "destroy", "--context", "rules_dir=" + self.rules_dir.as_posix(), - "--force" + "--force", ] - self.logger.info("Destroying AWS Config Rules ...") self.run_cmd( cmd=cmd, cwd=self.cdk_app_dir.as_posix(), allowed_return_codes=[0, 2], - ) \ No newline at end of file + ) diff --git a/rdk/runners/cfn_guard.py b/rdk/runners/cfn_guard.py index 60db0757..f33e0329 100644 --- a/rdk/runners/cfn_guard.py +++ b/rdk/runners/cfn_guard.py @@ -42,18 +42,17 @@ def test(self): "test", "--rules-file", self.rules_file.as_posix(), - "--test-data", - self.test_data.as_posix() + "--test-data", + self.test_data.as_posix(), ] if self.verbose: cmd.append("--verbose") - self.logger.info(f"Running cfn-guard unit test on {self.rules_file.relative_to(self.rules_file.parent.parent)} with testing data: {self.test_data.relative_to(self.rules_file.parent.parent)}") + self.logger.info( + f"Running cfn-guard unit test on {self.rules_file.relative_to(self.rules_file.parent.parent)} with testing data: {self.test_data.relative_to(self.rules_file.parent.parent)}" + ) return self.run_cmd( - cmd=cmd, - cwd=Path().absolute().as_posix(), - capture_output=True + cmd=cmd, cwd=Path().absolute().as_posix(), capture_output=True ) - diff --git a/requirements.txt b/requirements.txt index d131e82c..bed58f12 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,6 +8,8 @@ aws-cdk.asset-kubectl-v20==2.1.1 ; python_version ~= '3.7' aws-cdk.asset-node-proxy-agent-v5==2.0.105 ; python_version ~= '3.7' cattrs==22.2.0 ; python_version >= '3.7' certifi==2022.12.7 ; python_version >= '3.6' +# cfn-guard - while cfn-guard must be installed, there is no pip installer for it as of April 2023 +# Install guide for cfn-guard: https://docs.aws.amazon.com/cfn-guard/latest/ug/setting-up-linux.html colorlog==4.8.0 constructs==10.1.302 exceptiongroup==1.1.1 ; python_version < '3.11' diff --git 
a/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/AMI_DEPRECATED_CHECK.py b/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/AMI_DEPRECATED_CHECK.py index 29b7a009..79d8bbbd 100644 --- a/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/AMI_DEPRECATED_CHECK.py +++ b/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/AMI_DEPRECATED_CHECK.py @@ -11,37 +11,40 @@ APPLICABLE_RESOURCES = ["AWS::AutoScaling::AutoScalingGroup", "AWS::EC2::Instance"] DEFAULT_RESOURCE_TYPE = "AWS::EC2::Instance" + class AMI_DEPRECATED_CHECK(ConfigRule): - def evaluate_change(self, event, client_factory, configuration_item, valid_rule_parameters): + def evaluate_change( + self, event, client_factory, configuration_item, valid_rule_parameters + ): pass def evaluate_periodic(self, event, client_factory, valid_rule_parameters): ec2_client = client_factory.build_client("ec2") asg_client = client_factory.build_client("autoscaling") - mode = valid_rule_parameters['mode'] - if mode == 'ASG': + mode = valid_rule_parameters["mode"] + if mode == "ASG": return self.evaluate_asgs(ec2_client, asg_client) return self.evaluate_instances(ec2_client) def evaluate_parameters(self, rule_parameters): valid_rule_parameters = rule_parameters - if 'mode' not in rule_parameters: - valid_rule_parameters['mode'] = 'EC2' - if valid_rule_parameters['mode'] not in ('EC2', 'ASG'): - raise ValueError('Rule only supports parameter mode of EC2 and ASG') + if "mode" not in rule_parameters: + valid_rule_parameters["mode"] = "EC2" + if valid_rule_parameters["mode"] not in ("EC2", "ASG"): + raise ValueError("Rule only supports parameter mode of EC2 and ASG") return valid_rule_parameters def evaluate_instances(self, ec2_client): evaluations = [] instances = get_all_instances(ec2_client) for instance in instances: - ami_id = instance['ImageId'] + ami_id = instance["ImageId"] compliance_type, annotation = self.evaluate_ami(ec2_client, ami_id) evaluation = Evaluation( - resourceType='AWS::EC2::Instance', - resourceId=instance['InstanceId'], + resourceType="AWS::EC2::Instance", + resourceId=instance["InstanceId"], complianceType=compliance_type, annotation=annotation, ) @@ -57,8 +60,8 @@ def evaluate_asgs(self, ec2_client, asg_client): compliance_type, annotation = self.evaluate_ami(ec2_client, ami_id) evaluation = Evaluation( - resourceType='AWS::AutoScaling::AutoScalingGroup', - resourceId=asg['AutoScalingGroupName'], + resourceType="AWS::AutoScaling::AutoScalingGroup", + resourceId=asg["AutoScalingGroupName"], complianceType=compliance_type, annotation=annotation, ) @@ -68,79 +71,96 @@ def evaluate_asgs(self, ec2_client, asg_client): def evaluate_ami(self, ec2_client, ami_id): if not ami_id: - print(f'AMI {ami_id} is None, assuming deprecated/unshared/deleted') - return ComplianceType.NON_COMPLIANT, f'Image {ami_id} is either unshared or deleted' + print(f"AMI {ami_id} is None, assuming deprecated/unshared/deleted") + return ( + ComplianceType.NON_COMPLIANT, + f"Image {ami_id} is either unshared or deleted", + ) try: response = ec2_client.describe_images( ImageIds=[ami_id], IncludeDeprecated=True, ) - image = response['Images'][0] - if 'DeprecationTime' not in image: - return ComplianceType.COMPLIANT, f'Image {ami_id} is not deprecated' - deprecation_time = datetime.strptime(image['DeprecationTime'], '%Y-%m-%dT%H:%M:%S.%fZ') + image = response["Images"][0] + if "DeprecationTime" not in image: + return ComplianceType.COMPLIANT, f"Image {ami_id} is not deprecated" + deprecation_time = datetime.strptime( + image["DeprecationTime"], 
"%Y-%m-%dT%H:%M:%S.%fZ" + ) current_time = datetime.utcnow() if deprecation_time < current_time: - return ComplianceType.NON_COMPLIANT, f'Image {ami_id} is deprecated' - return ComplianceType.COMPLIANT, f'Image {ami_id} is not deprecated' + return ComplianceType.NON_COMPLIANT, f"Image {ami_id} is deprecated" + return ComplianceType.COMPLIANT, f"Image {ami_id} is not deprecated" except Exception as e: - print(f'Exception checking {ami_id}, assuming deprecated/unshared/deleted: {e}') - return ComplianceType.NON_COMPLIANT, f'Error checking {ami_id}, assuming noncompliant' + print( + f"Exception checking {ami_id}, assuming deprecated/unshared/deleted: {e}" + ) + return ( + ComplianceType.NON_COMPLIANT, + f"Error checking {ami_id}, assuming noncompliant", + ) def get_ami_from_asg(asg_client, ec2_client, asg): # asg is the individual asg metadata from the AWS API try: - if 'MixedInstancesPolicy' in asg: - launch_template_spec = asg['MixedInstancesPolicy']['LaunchTemplate'] \ - ['LaunchTemplateSpecification'] + if "MixedInstancesPolicy" in asg: + launch_template_spec = asg["MixedInstancesPolicy"]["LaunchTemplate"][ + "LaunchTemplateSpecification" + ] response = ec2_client.describe_launch_template_versions( - LaunchTemplateId = launch_template_spec['LaunchTemplateId'], - Versions = [launch_template_spec['Version']] + LaunchTemplateId=launch_template_spec["LaunchTemplateId"], + Versions=[launch_template_spec["Version"]], ) - return response['LaunchTemplateVersions'][0]['LaunchTemplateData']['ImageId'] - elif 'LaunchTemplate' in asg: - launch_template_spec = asg['LaunchTemplate'] + return response["LaunchTemplateVersions"][0]["LaunchTemplateData"][ + "ImageId" + ] + elif "LaunchTemplate" in asg: + launch_template_spec = asg["LaunchTemplate"] response = ec2_client.describe_launch_template_versions( - LaunchTemplateId = launch_template_spec['LaunchTemplateId'], - Versions = [launch_template_spec['Version']] + LaunchTemplateId=launch_template_spec["LaunchTemplateId"], + Versions=[launch_template_spec["Version"]], ) - return response['LaunchTemplateVersions'][0]['LaunchTemplateData']['ImageId'] + return response["LaunchTemplateVersions"][0]["LaunchTemplateData"][ + "ImageId" + ] else: - launch_config_name = asg['LaunchConfigurationName'] + launch_config_name = asg["LaunchConfigurationName"] response = asg_client.describe_launch_configurations( - LaunchConfigurationNames = [launch_config_name] + LaunchConfigurationNames=[launch_config_name] ) - return response['LaunchConfigurations'][0]['ImageId'] + return response["LaunchConfigurations"][0]["ImageId"] except Exception as e: - asg_name = asg.get('AutoScalingGroupName', 'Unknown') - print(f'Error retrieving AMI from ASG {asg_name}: {e}') + asg_name = asg.get("AutoScalingGroupName", "Unknown") + print(f"Error retrieving AMI from ASG {asg_name}: {e}") return None + def get_all_asgs(asg_client): asgs = [] response = asg_client.describe_auto_scaling_groups() - asgs.extend(response['AutoScalingGroups']) - while 'NextToken' in response: - response = asg_client.describe_auto_scaling_groups(response['NextToken']) - asgs.extend(response['AutoScalingGroups']) + asgs.extend(response["AutoScalingGroups"]) + while "NextToken" in response: + response = asg_client.describe_auto_scaling_groups(response["NextToken"]) + asgs.extend(response["AutoScalingGroups"]) return asgs + def get_all_instances(ec2_client): instances = [] # Get all instances with pagination response = ec2_client.describe_instances( Filters=[ { - 'Name': 'instance-state-name', - 'Values': ['pending', 
'running', 'stopping', 'stopped'] + "Name": "instance-state-name", + "Values": ["pending", "running", "stopping", "stopped"], }, ], ) for reservation in response["Reservations"]: instances.extend(reservation["Instances"]) - while 'NextToken' in response: - response = ec2_client.describe_instances(NextToken=response['NextToken']) + while "NextToken" in response: + response = ec2_client.describe_instances(NextToken=response["NextToken"]) for reservation in response["Reservations"]: instances.extend(reservation["Instances"]) return instances @@ -152,4 +172,4 @@ def get_all_instances(ec2_client): def lambda_handler(event, context): my_rule = AMI_DEPRECATED_CHECK() evaluator = Evaluator(my_rule) - return evaluator.handle(event, context) \ No newline at end of file + return evaluator.handle(event, context) diff --git a/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/AMI_DEPRECATED_CHECK_test.py b/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/AMI_DEPRECATED_CHECK_test.py index ad0ff3b3..92da0ae9 100644 --- a/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/AMI_DEPRECATED_CHECK_test.py +++ b/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/AMI_DEPRECATED_CHECK_test.py @@ -16,18 +16,19 @@ # Main Code # ############# -MODULE = __import__('AMI_DEPRECATED_CHECK') +MODULE = __import__("AMI_DEPRECATED_CHECK") RULE = MODULE.AMI_DEPRECATED_CHECK() -#example for mocking S3 API calls +# example for mocking S3 API calls CLIENT_FACTORY = MagicMock() EC2_CLIENT_MOCK = MagicMock() ASG_CLIENT_MOCK = MagicMock() + def mock_get_client(client_name, *args, **kwargs): if client_name == "ec2": return EC2_CLIENT_MOCK - elif client_name == 'autoscaling': + elif client_name == "autoscaling": return ASG_CLIENT_MOCK raise Exception("Attempting to create an unknown client") @@ -45,7 +46,7 @@ class ComplianceTest(unittest.TestCase): "CreationDate": "2021-07-19T19:03:00.000Z", "ImageId": "ami-abcd1234", "Name": "test-image", - "DeprecationTime": "2021-07-21T17:03:00.000Z" + "DeprecationTime": "2021-07-21T17:03:00.000Z", } ] } @@ -55,23 +56,18 @@ class ComplianceTest(unittest.TestCase): { "CreationDate": "2021-07-01T19:03:00.000Z", "ImageId": "ami-abcd1234", - "Name": "test-image" + "Name": "test-image", } ] } - missing_ami_response = {'Images': []} + missing_ami_response = {"Images": []} instance_response = { "Reservations": [ { "Groups": [], - "Instances": [ - { - "ImageId": "ami-abcd1234", - "InstanceId": "i-abcd1234" - } - ] + "Instances": [{"ImageId": "ami-abcd1234", "InstanceId": "i-abcd1234"}], } ] } @@ -83,8 +79,8 @@ class ComplianceTest(unittest.TestCase): "LaunchTemplate": { "LaunchTemplateId": "lt-xyz789", "LaunchTemplateName": "test-lt", - "Version": "1" - } + "Version": "1", + }, } ] } @@ -98,42 +94,34 @@ class ComplianceTest(unittest.TestCase): "LaunchTemplateSpecification": { "LaunchTemplateId": "lt-xyz789", "LaunchTemplateName": "test-lt", - "Version": 1 + "Version": 1, } } - } + }, } ] } asg_launch_config = { "AutoScalingGroups": [ - { - "AutoScalingGroupName": "test-asg", - "LaunchConfigurationName": "test-lc" - } + {"AutoScalingGroupName": "test-asg", "LaunchConfigurationName": "test-lc"} ] } launch_template_versions = { - 'LaunchTemplateVersions': [ + "LaunchTemplateVersions": [ { - 'LaunchTemplateData': { - 'ImageId': 'ami-6057e21a' - }, - 'LaunchTemplateId': "lt-xyz789", - 'LaunchTemplateName': "test-lt", - 'VersionNumber': 2, + "LaunchTemplateData": {"ImageId": "ami-6057e21a"}, + "LaunchTemplateId": "lt-xyz789", + "LaunchTemplateName": "test-lt", + "VersionNumber": 2, } ] 
} launch_config = { "LaunchConfigurations": [ - { - "LaunchConfigurationName": "test-lc", - "ImageId": "ami-abcd1234" - } + {"LaunchConfigurationName": "test-lc", "ImageId": "ami-abcd1234"} ] } @@ -144,86 +132,113 @@ def setUp(self): def test_evaluate_compliant_instance(self): EC2_CLIENT_MOCK.describe_instances.return_value = self.instance_response EC2_CLIENT_MOCK.describe_images.return_value = self.compliant_ami_response - response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {'mode': 'EC2'}) - instance = self.instance_response['Reservations'][0]['Instances'][0] - response_expected = [Evaluation( - complianceType=ComplianceType.COMPLIANT, - resourceId=instance['InstanceId'], - resourceType='AWS::EC2::Instance', - annotation=f'Image {instance["ImageId"]} is not deprecated' - )] + response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {"mode": "EC2"}) + instance = self.instance_response["Reservations"][0]["Instances"][0] + response_expected = [ + Evaluation( + complianceType=ComplianceType.COMPLIANT, + resourceId=instance["InstanceId"], + resourceType="AWS::EC2::Instance", + annotation=f'Image {instance["ImageId"]} is not deprecated', + ) + ] assert_successful_evaluation(self, response, response_expected) def test_evaluate_noncompliant_instance_deprecated_ami(self): EC2_CLIENT_MOCK.describe_instances.return_value = self.instance_response EC2_CLIENT_MOCK.describe_images.return_value = self.deprecated_ami_response - response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {'mode': 'EC2'}) - instance = self.instance_response['Reservations'][0]['Instances'][0] - response_expected = [Evaluation( - complianceType=ComplianceType.NON_COMPLIANT, - resourceId=instance['InstanceId'], - resourceType='AWS::EC2::Instance', - annotation=f'Image {instance["ImageId"]} is deprecated' - )] + response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {"mode": "EC2"}) + instance = self.instance_response["Reservations"][0]["Instances"][0] + response_expected = [ + Evaluation( + complianceType=ComplianceType.NON_COMPLIANT, + resourceId=instance["InstanceId"], + resourceType="AWS::EC2::Instance", + annotation=f'Image {instance["ImageId"]} is deprecated', + ) + ] assert_successful_evaluation(self, response, response_expected) def test_evaluate_noncompliant_instance_missing_ami(self): EC2_CLIENT_MOCK.describe_instances.return_value = self.instance_response EC2_CLIENT_MOCK.describe_images.return_value = self.missing_ami_response - response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {'mode': 'EC2'}) - instance = self.instance_response['Reservations'][0]['Instances'][0] - response_expected = [Evaluation( - complianceType=ComplianceType.NON_COMPLIANT, - resourceId=instance['InstanceId'], - resourceType='AWS::EC2::Instance', - annotation=f'Error checking {instance["ImageId"]}, assuming noncompliant' - )] + response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {"mode": "EC2"}) + instance = self.instance_response["Reservations"][0]["Instances"][0] + response_expected = [ + Evaluation( + complianceType=ComplianceType.NON_COMPLIANT, + resourceId=instance["InstanceId"], + resourceType="AWS::EC2::Instance", + annotation=f'Error checking {instance["ImageId"]}, assuming noncompliant', + ) + ] assert_successful_evaluation(self, response, response_expected) def test_evaluate_asg_mixed_instances_launch_template_compliant(self): - ASG_CLIENT_MOCK.describe_auto_scaling_groups.return_value = self.asg_mixed_instances - EC2_CLIENT_MOCK.describe_launch_template_versions.return_value = self.launch_template_versions + 
ASG_CLIENT_MOCK.describe_auto_scaling_groups.return_value = ( + self.asg_mixed_instances + ) + EC2_CLIENT_MOCK.describe_launch_template_versions.return_value = ( + self.launch_template_versions + ) EC2_CLIENT_MOCK.describe_images.return_value = self.compliant_ami_response - response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {'mode': 'ASG'}) - asg = self.asg_mixed_instances['AutoScalingGroups'][0] - launch_template_version = self.launch_template_versions['LaunchTemplateVersions'][0] - response_expected = [Evaluation( - complianceType=ComplianceType.COMPLIANT, - resourceId=asg['AutoScalingGroupName'], - resourceType='AWS::AutoScaling::AutoScalingGroup', - annotation=f'Image {launch_template_version["LaunchTemplateData"]["ImageId"]} is not deprecated' - )] + response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {"mode": "ASG"}) + asg = self.asg_mixed_instances["AutoScalingGroups"][0] + launch_template_version = self.launch_template_versions[ + "LaunchTemplateVersions" + ][0] + response_expected = [ + Evaluation( + complianceType=ComplianceType.COMPLIANT, + resourceId=asg["AutoScalingGroupName"], + resourceType="AWS::AutoScaling::AutoScalingGroup", + annotation=f'Image {launch_template_version["LaunchTemplateData"]["ImageId"]} is not deprecated', + ) + ] assert_successful_evaluation(self, response, response_expected) def test_evaluate_noncompliant_asg_launch_config_deprecated_ami(self): - ASG_CLIENT_MOCK.describe_auto_scaling_groups.return_value = self.asg_launch_config + ASG_CLIENT_MOCK.describe_auto_scaling_groups.return_value = ( + self.asg_launch_config + ) ASG_CLIENT_MOCK.describe_launch_configurations.return_value = self.launch_config EC2_CLIENT_MOCK.describe_images.return_value = self.deprecated_ami_response - response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {'mode': 'ASG'}) - asg = self.asg_launch_config['AutoScalingGroups'][0] - launch_config = self.launch_config['LaunchConfigurations'][0] - response_expected = [Evaluation( - complianceType=ComplianceType.NON_COMPLIANT, - resourceId=asg['AutoScalingGroupName'], - resourceType='AWS::AutoScaling::AutoScalingGroup', - annotation=f'Image {launch_config["ImageId"]} is deprecated' - )] + response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {"mode": "ASG"}) + asg = self.asg_launch_config["AutoScalingGroups"][0] + launch_config = self.launch_config["LaunchConfigurations"][0] + response_expected = [ + Evaluation( + complianceType=ComplianceType.NON_COMPLIANT, + resourceId=asg["AutoScalingGroupName"], + resourceType="AWS::AutoScaling::AutoScalingGroup", + annotation=f'Image {launch_config["ImageId"]} is deprecated', + ) + ] assert_successful_evaluation(self, response, response_expected) def test_evaluate_noncompliant_asg_launch_template_missing_ami(self): - ASG_CLIENT_MOCK.describe_auto_scaling_groups.return_value = self.asg_launch_template - ASG_CLIENT_MOCK.describe_launch_template_versions.return_value = self.launch_template_versions + ASG_CLIENT_MOCK.describe_auto_scaling_groups.return_value = ( + self.asg_launch_template + ) + ASG_CLIENT_MOCK.describe_launch_template_versions.return_value = ( + self.launch_template_versions + ) EC2_CLIENT_MOCK.describe_images.return_value = self.missing_ami_response - response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {'mode': 'ASG'}) - asg = self.asg_launch_template['AutoScalingGroups'][0] - launch_template_version = self.launch_template_versions['LaunchTemplateVersions'][0] - response_expected = [Evaluation( - complianceType=ComplianceType.NON_COMPLIANT, - 
resourceId=asg['AutoScalingGroupName'], - resourceType='AWS::AutoScaling::AutoScalingGroup', - annotation=f'Error checking {launch_template_version["LaunchTemplateData"]["ImageId"]}, assuming noncompliant' - )] + response = RULE.evaluate_periodic({}, CLIENT_FACTORY, {"mode": "ASG"}) + asg = self.asg_launch_template["AutoScalingGroups"][0] + launch_template_version = self.launch_template_versions[ + "LaunchTemplateVersions" + ][0] + response_expected = [ + Evaluation( + complianceType=ComplianceType.NON_COMPLIANT, + resourceId=asg["AutoScalingGroupName"], + resourceType="AWS::AutoScaling::AutoScalingGroup", + annotation=f'Error checking {launch_template_version["LaunchTemplateData"]["ImageId"]}, assuming noncompliant', + ) + ] assert_successful_evaluation(self, response, response_expected) -if __name__ == '__main__': - unittest.main() \ No newline at end of file + +if __name__ == "__main__": + unittest.main() diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 5423a1db..f702b5b5 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -12,4 +12,4 @@ # Enable debug logs for rdk logger = rdk_logger.get_main_logger() -rdk_logger.update_stream_handler_level(logger=logger, level=logging.DEBUG) \ No newline at end of file +rdk_logger.update_stream_handler_level(logger=logger, level=logging.DEBUG) diff --git a/tests/unit/core/test_errors.py b/tests/unit/core/test_errors.py index de52cc25..3d712471 100644 --- a/tests/unit/core/test_errors.py +++ b/tests/unit/core/test_errors.py @@ -15,24 +15,16 @@ def test_errors_hierarchy(): assert issubclass(rdk_errors.RdkCommandNotAllowedError, rdk_errors.RdkError) assert issubclass(rdk_errors.RdkCustodianPolicyReadError, rdk_errors.RdkError) - assert issubclass( - rdk_errors.RdkCustodianUnsupportedModeError, rdk_errors.RdkError - ) - assert issubclass( - rdk_errors.RdkCustodianLambdaMonitorError, rdk_errors.RdkError - ) + assert issubclass(rdk_errors.RdkCustodianUnsupportedModeError, rdk_errors.RdkError) + assert issubclass(rdk_errors.RdkCustodianLambdaMonitorError, rdk_errors.RdkError) assert issubclass(rdk_errors.RdkCustodianLambdaInvokeError, rdk_errors.RdkError) assert issubclass(rdk_errors.RdkMalformedPlanFile, rdk_errors.RdkError) assert issubclass(rdk_errors.RdkPyTestFixtureInitError, rdk_errors.RdkError) assert issubclass(rdk_errors.RdkTerraformMalformedPlanData, rdk_errors.RdkError) - assert issubclass( - rdk_errors.RdkTerraformMalformedStateData, rdk_errors.RdkError - ) - assert issubclass( - rdk_errors.RdkTerraformAvenueDownloadError, rdk_errors.RdkError - ) + assert issubclass(rdk_errors.RdkTerraformMalformedStateData, rdk_errors.RdkError) + assert issubclass(rdk_errors.RdkTerraformAvenueDownloadError, rdk_errors.RdkError) assert issubclass( rdk_errors.RdkReportUploadInvalidEnvironmentError, rdk_errors.RdkError diff --git a/tests/unit/runners/test_base.py b/tests/unit/runners/test_base.py index ffa51d38..75738ff6 100644 --- a/tests/unit/runners/test_base.py +++ b/tests/unit/runners/test_base.py @@ -48,7 +48,7 @@ def test_run_cmd_basic(mocker: MockerFixture): }, ) subprocess_popen_mock.assert_called_with( - args=["cdk", "--version"],, + args=["cdk", "--version"], stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, stderr=subprocess.PIPE, @@ -63,13 +63,15 @@ def test_run_cmd_basic(mocker: MockerFixture): subprocess_popen_mock.reset_mock(return_value=True, side_effect=True) subprocess_popen_mock.side_effect = FileNotFoundError("File foo does not exist") with pytest.raises(RdkCommandInvokeError): - runner.run_cmd(cmd=["cdk", 
"--version"],) + runner.run_cmd( + cmd=["cdk", "--version"], + ) # Test return codes subprocess_popen_mock.reset_mock(return_value=True, side_effect=True) subprocess_popen_mock.return_value.__enter__().returncode = 2 with pytest.raises(RdkCommandExecutionError): - runner.run_cmd(cmd=["cdk", "--version"],, allowed_return_codes=[1]) + runner.run_cmd(cmd=["cdk", "--version"], allowed_return_codes=[1]) def test_run_cmd_logging( From fae2782ebe31f752a88c8f22d0d1c6e92f183947 Mon Sep 17 00:00:00 2001 From: Benjamin Morris Date: Mon, 8 May 2023 18:19:10 -0700 Subject: [PATCH 13/23] windows updates --- .gitignore | 6 + README.md | 29 +- cdk.out/CdkStack.assets.json | 19 ++ cdk.out/CdkStack.template.json | 314 +++++++++++++++++ cdk.out/cdk.out | 1 + cdk.out/manifest.json | 167 +++++++++ cdk.out/tree.json | 165 +++++++++ rdk/cli/__init__.py | 11 + rdk/cli/commands/__init__.py | 1 + rdk/cli/commands/clean.py | 14 + rdk/cli/commands/create.py | 14 + rdk/cli/commands/create_region_set.py | 0 rdk/cli/commands/create_rule_template.py | 14 + rdk/cli/commands/deploy_organization.py | 14 + rdk/cli/commands/destroy.py | 1 + rdk/cli/commands/export.py | 0 rdk/cli/commands/logs.py | 0 rdk/cli/commands/modify.py | 0 rdk/cli/commands/rulesets.py | 0 rdk/cli/commands/sample_ci.py | 22 ++ rdk/cli/commands/test.py | 1 + rdk/cli/commands/undeploy_organization.py | 0 rdk/cli/main.py | 21 ++ rdk/core/get_accepted_resource_types.py | 317 ++++++++++++++++++ rdk/core/sample_ci.py | 37 ++ .../templates/ci_examples/AWS_S3_Bucket.json | 47 +++ rdk/frameworks/cdk/app.py | 2 +- .../cdk/cdk/core/rule_parameters.py | 4 +- rdk/runners/base.py | 9 + rdk/runners/cdk.py | 20 +- tox.ini | 2 + 31 files changed, 1244 insertions(+), 8 deletions(-) create mode 100644 cdk.out/CdkStack.assets.json create mode 100644 cdk.out/CdkStack.template.json create mode 100644 cdk.out/cdk.out create mode 100644 cdk.out/manifest.json create mode 100644 cdk.out/tree.json create mode 100644 rdk/cli/commands/clean.py create mode 100644 rdk/cli/commands/create.py create mode 100644 rdk/cli/commands/create_region_set.py create mode 100644 rdk/cli/commands/create_rule_template.py create mode 100644 rdk/cli/commands/deploy_organization.py create mode 100644 rdk/cli/commands/export.py create mode 100644 rdk/cli/commands/logs.py create mode 100644 rdk/cli/commands/modify.py create mode 100644 rdk/cli/commands/rulesets.py create mode 100644 rdk/cli/commands/sample_ci.py create mode 100644 rdk/cli/commands/undeploy_organization.py create mode 100644 rdk/core/get_accepted_resource_types.py create mode 100644 rdk/core/sample_ci.py create mode 100644 rdk/core/templates/ci_examples/AWS_S3_Bucket.json create mode 100644 tox.ini diff --git a/.gitignore b/.gitignore index 6ee6b8c0..08ee0e3e 100644 --- a/.gitignore +++ b/.gitignore @@ -248,6 +248,7 @@ ipython_config.py # having no cross-platform support, pipenv may install dependencies that don't work, or not # install all needed dependencies. #Pipfile.lock +**.lock # PEP 582; used by e.g. github.com/David-OConnor/pyflow __pypackages__/ @@ -267,6 +268,7 @@ venv/ ENV/ env.bak/ venv.bak/ +myenv/ # Spyder project settings .spyderproject @@ -364,3 +366,7 @@ docs/reference/cli.md docs/CHANGELOG.md ############################################################################### + + +# CDK +**/cdk.out/ \ No newline at end of file diff --git a/README.md b/README.md index db61993d..1a1bf525 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,34 @@ +# Summary + +This branch of RDK is for the alpha-testing of RDK v1.0. 
+ +RDK v1.0 will feature several changes to make RDK more useful and maintainable in the long term. The top changes are: +- Support for CfnGuard Rules +- Changing back-end deployment methodology from CloudFormation to CDK +- Refactoring the monolithic `rdk.py` file into individual files for each RDK command. + +Because these changes have the potential to be breaking changes, this will initially be released using a non-semantic version (eg. alpha-1.0.0) so that existing RDK pipelines are not impacted. + +# TODO + +Add README.md from RDK v0 here. + # Developer Instructions -These steps are used for developers who want to make and test changes to the RDK source code. +These steps are used for developers who want to make and test changes to the RDK source code and compile an RDK executable. + +You can also run `python -m rdk` from the root directory to run RDK from the script (will be slow to run). + +You can attach the CLI to the debugger using `python -m debugpy --listen 5678 rdk deploy` + +CDK may attempt to run `python3`, which could cause issues on Windows systems where Python is often just named `python.exe`. Copying `python.exe` to `python3.exe` is a workaround for this issue. + +## Windows venv instructions +- `virtualenv myenv` +- `myenv\Scripts\activate` + +Note: if using a virtual environment on Windows, you may need to `pip install -r requirements.txt` outside of your venv as well. ## Prerequisites diff --git a/cdk.out/CdkStack.assets.json b/cdk.out/CdkStack.assets.json new file mode 100644 index 00000000..0f012846 --- /dev/null +++ b/cdk.out/CdkStack.assets.json @@ -0,0 +1,19 @@ +{ + "version": "31.0.0", + "files": { + "1f66503cc1455d71cbdf4d10ceddc706d2016dc01b701a1e11d88204bc108317": { + "source": { + "path": "CdkStack.template.json", + "packaging": "file" + }, + "destinations": { + "current_account-current_region": { + "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", + "objectKey": "1f66503cc1455d71cbdf4d10ceddc706d2016dc01b701a1e11d88204bc108317.json", + "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" + } + } + } + }, + "dockerImages": {} +} \ No newline at end of file diff --git a/cdk.out/CdkStack.template.json b/cdk.out/CdkStack.template.json new file mode 100644 index 00000000..204a2811 --- /dev/null +++ b/cdk.out/CdkStack.template.json @@ -0,0 +1,314 @@ +{ + "Resources": { + "APIGATEWAYPRIVATEC8B60F10": { + "Type": "AWS::Config::ConfigRule", + "Properties": { + "Source": { + "CustomPolicyDetails": { + "EnableDebugLogDelivery": false, + "PolicyRuntime": "guard-2.x.x", + "PolicyText": "#\n# Select all AWS::ApiGateway::RestApi resources\n# present in the Resources section of the template. \n#\nlet api_gws = Resources.*[ Type == 'AWS::ApiGateway::RestApi']\n\n#\n# Rule intent: \n# 1) All AWS::ApiGateway::RestApi resources deployed must be private. \n# 2) All AWS::ApiGateway::RestApi resources deployed must have at least one AWS Identity and Access Management (IAM) policy condition key to allow access from a VPC.\n#\n# Expectations: \n# 1) SKIP when there are no AWS::ApiGateway::RestApi resources in the template. \n# 2) PASS when:\n# ALL AWS::ApiGateway::RestApi resources in the template have the EndpointConfiguration property set to Type: PRIVATE. \n# ALL AWS::ApiGateway::RestApi resources in the template have one IAM condition key specified in the Policy property with aws:sourceVpc or :SourceVpc. \n# 3) FAIL otherwise. 
\n#\n#\n\nrule check_rest_api_is_private when %api_gws !empty { \n %api_gws {\n Properties.EndpointConfiguration.Types[*] == \"PRIVATE\" \n } \n} \n\nrule check_rest_api_has_vpc_access when check_rest_api_is_private {\n %api_gws {\n Properties {\n #\n # ALL AWS::ApiGateway::RestApi resources in the template have one IAM condition key specified in the Policy property with \n # aws:sourceVpc or :SourceVpc\n # \n some Policy.Statement[*] {\n Condition.*[ keys == /aws:[sS]ource(Vpc|VPC|Vpce|VPCE)/ ] !empty\n }\n }\n }\n}" + }, + "Owner": "CUSTOM_POLICY", + "SourceDetails": [ + { + "EventSource": "aws.config", + "MessageType": "ConfigurationItemChangeNotification" + }, + { + "EventSource": "aws.config", + "MessageType": "OversizedConfigurationItemChangeNotification" + } + ] + }, + "ConfigRuleName": "API_GATEWAY_PRIVATE", + "Description": "API_GATEWAY_PRIVATE", + "InputParameters": {} + }, + "Metadata": { + "aws:cdk:path": "CdkStack/API_GATEWAY_PRIVATE/Resource" + } + }, + "APIGATEWAYPRIVATERemediationConfiguration": { + "Type": "AWS::Config::RemediationConfiguration", + "Properties": { + "ConfigRuleName": "API_GATEWAY_PRIVATE", + "TargetId": "AWS-PublishSNSNotification", + "TargetType": "SSM_DOCUMENT", + "Automatic": true, + "MaximumAutomaticAttempts": 2, + "Parameters": { + "AutomationAssumeRole": { + "StaticValue": { + "Values": [ + { + "Fn::Sub": "arn:aws:iam::${AWS::AccountId}:role/sns-access" + } + ] + } + }, + "Message": { + "StaticValue": { + "Values": [ + "hi" + ] + } + }, + "TopicArn": { + "StaticValue": { + "Values": [ + { + "Fn::Sub": "arn:aws:sns:${AWS::Region}:${AWS::AccountId}:rules-notification" + } + ] + } + } + }, + "ResourceType": "AWS::EC2::Instance", + "RetryAttemptSeconds": 2, + "TargetVersion": "1" + }, + "Metadata": { + "aws:cdk:path": "CdkStack/API_GATEWAY_PRIVATERemediationConfiguration" + } + }, + "CDKMetadata": { + "Type": "AWS::CDK::Metadata", + "Properties": { + "Analytics": "v2:deflate64:H4sIAAAAAAAA/y2LQQ7CIBBFz9J9Ga1d6J4LNHgAg0B1LDAJDDFN07tXS1c/7738C1xvcG70NwtjJ+HxCcudtZnan3oYiiO+YJElM4WBPJq5lWOUu1fFuz8pF5xFzUhHKGmHtcZMJRl33CzWMsz8pnjqoeugbz4ZUaQSGYMDVXcDBAyx15oAAAA=" + }, + "Metadata": { + "aws:cdk:path": "CdkStack/CDKMetadata/Default" + }, + "Condition": "CDKMetadataAvailable" + } + }, + "Conditions": { + "CDKMetadataAvailable": { + "Fn::Or": [ + { + "Fn::Or": [ + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "af-south-1" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "ap-east-1" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "ap-northeast-1" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "ap-northeast-2" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "ap-south-1" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "ap-southeast-1" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "ap-southeast-2" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "ca-central-1" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "cn-north-1" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "cn-northwest-1" + ] + } + ] + }, + { + "Fn::Or": [ + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "eu-central-1" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "eu-north-1" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "eu-south-1" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "eu-west-1" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "eu-west-2" + ] + }, + { + 
"Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "eu-west-3" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "me-south-1" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "sa-east-1" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "us-east-1" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "us-east-2" + ] + } + ] + }, + { + "Fn::Or": [ + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "us-west-1" + ] + }, + { + "Fn::Equals": [ + { + "Ref": "AWS::Region" + }, + "us-west-2" + ] + } + ] + } + ] + } + }, + "Parameters": { + "BootstrapVersion": { + "Type": "AWS::SSM::Parameter::Value", + "Default": "/cdk-bootstrap/hnb659fds/version", + "Description": "Version of the CDK Bootstrap resources in this environment, automatically retrieved from SSM Parameter Store. [cdk:skip]" + } + }, + "Rules": { + "CheckBootstrapVersion": { + "Assertions": [ + { + "Assert": { + "Fn::Not": [ + { + "Fn::Contains": [ + [ + "1", + "2", + "3", + "4", + "5" + ], + { + "Ref": "BootstrapVersion" + } + ] + } + ] + }, + "AssertDescription": "CDK bootstrap stack version 6 required. Please run 'cdk bootstrap' with a recent version of the CDK CLI." + } + ] + } + } +} \ No newline at end of file diff --git a/cdk.out/cdk.out b/cdk.out/cdk.out new file mode 100644 index 00000000..7925065e --- /dev/null +++ b/cdk.out/cdk.out @@ -0,0 +1 @@ +{"version":"31.0.0"} \ No newline at end of file diff --git a/cdk.out/manifest.json b/cdk.out/manifest.json new file mode 100644 index 00000000..f8a596d9 --- /dev/null +++ b/cdk.out/manifest.json @@ -0,0 +1,167 @@ +{ + "version": "31.0.0", + "artifacts": { + "CdkStack.assets": { + "type": "cdk:asset-manifest", + "properties": { + "file": "CdkStack.assets.json", + "requiresBootstrapStackVersion": 6, + "bootstrapStackVersionSsmParameter": "/cdk-bootstrap/hnb659fds/version" + } + }, + "CdkStack": { + "type": "aws:cloudformation:stack", + "environment": "aws://unknown-account/unknown-region", + "properties": { + "templateFile": "CdkStack.template.json", + "validateOnSynth": false, + "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-${AWS::Region}", + "cloudFormationExecutionRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-cfn-exec-role-${AWS::AccountId}-${AWS::Region}", + "stackTemplateAssetObjectUrl": "s3://cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}/1f66503cc1455d71cbdf4d10ceddc706d2016dc01b701a1e11d88204bc108317.json", + "requiresBootstrapStackVersion": 6, + "bootstrapStackVersionSsmParameter": "/cdk-bootstrap/hnb659fds/version", + "additionalDependencies": [ + "CdkStack.assets" + ], + "lookupRole": { + "arn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-lookup-role-${AWS::AccountId}-${AWS::Region}", + "requiresBootstrapStackVersion": 8, + "bootstrapStackVersionSsmParameter": "/cdk-bootstrap/hnb659fds/version" + } + }, + "dependencies": [ + "CdkStack.assets" + ], + "metadata": { + "/CdkStack/API_GATEWAY_PRIVATE/Resource": [ + { + "type": "aws:cdk:logicalId", + "data": "APIGATEWAYPRIVATEC8B60F10", + "trace": [ + "new CustomPolicy (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\aws-config\\lib\\rule.js:1:7302)", + "Kernel._create (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9964:29)", + "Kernel.create (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9693:29)", + "KernelHost.processRequest 
(C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", + "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", + "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", + "process.processImmediate (node:internal/timers:471:21)" + ] + } + ], + "/CdkStack/API_GATEWAY_PRIVATERemediationConfiguration": [ + { + "type": "aws:cdk:logicalId", + "data": "APIGATEWAYPRIVATERemediationConfiguration", + "trace": [ + "Kernel._create (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9964:29)", + "Kernel.create (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9693:29)", + "KernelHost.processRequest (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", + "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", + "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", + "process.processImmediate (node:internal/timers:471:21)" + ] + } + ], + "/CdkStack/CDKMetadata/Default": [ + { + "type": "aws:cdk:logicalId", + "data": "CDKMetadata", + "trace": [ + "new MetadataResource (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\metadata-resource.js:1:707)", + "C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:2847", + "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:368)", + "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:330)", + "injectMetadataResources (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:2662)", + "synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:1:864)", + "App.synth (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stage.js:1:2052)", + "C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:105", + "Kernel._ensureSync (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:10358:28)", + "Kernel.invoke (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:34)", + "KernelHost.processRequest (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", + "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", + "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", + "process.processImmediate (node:internal/timers:471:21)" + ] + } + ], + "/CdkStack/CDKMetadata/Condition": [ + { + "type": "aws:cdk:logicalId", + "data": "CDKMetadataAvailable", + "trace": [ + "new MetadataResource (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\metadata-resource.js:1:966)", + "C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:2847", + "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:368)", + "visit 
(C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:330)", + "injectMetadataResources (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:2662)", + "synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:1:864)", + "App.synth (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stage.js:1:2052)", + "C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:105", + "Kernel._ensureSync (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:10358:28)", + "Kernel.invoke (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:34)", + "KernelHost.processRequest (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", + "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", + "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", + "process.processImmediate (node:internal/timers:471:21)" + ] + } + ], + "/CdkStack/BootstrapVersion": [ + { + "type": "aws:cdk:logicalId", + "data": "BootstrapVersion", + "trace": [ + "addBootstrapVersionRule (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\stack-synthesizer.js:1:5072)", + "DefaultStackSynthesizer.addBootstrapVersionRule (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\stack-synthesizer.js:1:2792)", + "DefaultStackSynthesizer.synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\default-synthesizer.js:1:5963)", + "C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:3375", + "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:368)", + "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:330)", + "synthesizeTree (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:3219)", + "synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:1:1083)", + "App.synth (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stage.js:1:2052)", + "C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:105", + "Kernel._ensureSync (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:10358:28)", + "Kernel.invoke (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:34)", + "KernelHost.processRequest (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", + "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", + "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", + "process.processImmediate (node:internal/timers:471:21)" + ] + } + ], + "/CdkStack/CheckBootstrapVersion": [ + { + "type": "aws:cdk:logicalId", 
+ "data": "CheckBootstrapVersion", + "trace": [ + "addBootstrapVersionRule (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\stack-synthesizer.js:1:5425)", + "DefaultStackSynthesizer.addBootstrapVersionRule (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\stack-synthesizer.js:1:2792)", + "DefaultStackSynthesizer.synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\default-synthesizer.js:1:5963)", + "C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:3375", + "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:368)", + "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:330)", + "synthesizeTree (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:3219)", + "synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:1:1083)", + "App.synth (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stage.js:1:2052)", + "C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:105", + "Kernel._ensureSync (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:10358:28)", + "Kernel.invoke (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:34)", + "KernelHost.processRequest (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", + "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", + "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", + "process.processImmediate (node:internal/timers:471:21)" + ] + } + ] + }, + "displayName": "CdkStack" + }, + "Tree": { + "type": "cdk:tree", + "properties": { + "file": "tree.json" + } + } + } +} \ No newline at end of file diff --git a/cdk.out/tree.json b/cdk.out/tree.json new file mode 100644 index 00000000..34a9965a --- /dev/null +++ b/cdk.out/tree.json @@ -0,0 +1,165 @@ +{ + "version": "tree-0.1", + "tree": { + "id": "App", + "path": "", + "children": { + "CdkStack": { + "id": "CdkStack", + "path": "CdkStack", + "children": { + "API_GATEWAY_PRIVATE": { + "id": "API_GATEWAY_PRIVATE", + "path": "CdkStack/API_GATEWAY_PRIVATE", + "children": { + "Resource": { + "id": "Resource", + "path": "CdkStack/API_GATEWAY_PRIVATE/Resource", + "attributes": { + "aws:cdk:cloudformation:type": "AWS::Config::ConfigRule", + "aws:cdk:cloudformation:props": { + "source": { + "owner": "CUSTOM_POLICY", + "sourceDetails": [ + { + "eventSource": "aws.config", + "messageType": "ConfigurationItemChangeNotification" + }, + { + "eventSource": "aws.config", + "messageType": "OversizedConfigurationItemChangeNotification" + } + ], + "customPolicyDetails": { + "enableDebugLogDelivery": false, + "policyRuntime": "guard-2.x.x", + "policyText": "#\n# Select all AWS::ApiGateway::RestApi resources\n# present in the Resources section of the template. 
\n#\nlet api_gws = Resources.*[ Type == 'AWS::ApiGateway::RestApi']\n\n#\n# Rule intent: \n# 1) All AWS::ApiGateway::RestApi resources deployed must be private. \n# 2) All AWS::ApiGateway::RestApi resources deployed must have at least one AWS Identity and Access Management (IAM) policy condition key to allow access from a VPC.\n#\n# Expectations: \n# 1) SKIP when there are no AWS::ApiGateway::RestApi resources in the template. \n# 2) PASS when:\n# ALL AWS::ApiGateway::RestApi resources in the template have the EndpointConfiguration property set to Type: PRIVATE. \n# ALL AWS::ApiGateway::RestApi resources in the template have one IAM condition key specified in the Policy property with aws:sourceVpc or :SourceVpc. \n# 3) FAIL otherwise. \n#\n#\n\nrule check_rest_api_is_private when %api_gws !empty { \n %api_gws {\n Properties.EndpointConfiguration.Types[*] == \"PRIVATE\" \n } \n} \n\nrule check_rest_api_has_vpc_access when check_rest_api_is_private {\n %api_gws {\n Properties {\n #\n # ALL AWS::ApiGateway::RestApi resources in the template have one IAM condition key specified in the Policy property with \n # aws:sourceVpc or :SourceVpc\n # \n some Policy.Statement[*] {\n Condition.*[ keys == /aws:[sS]ource(Vpc|VPC|Vpce|VPCE)/ ] !empty\n }\n }\n }\n}" + } + }, + "configRuleName": "API_GATEWAY_PRIVATE", + "description": "API_GATEWAY_PRIVATE", + "inputParameters": {} + } + }, + "constructInfo": { + "fqn": "aws-cdk-lib.aws_config.CfnConfigRule", + "version": "2.78.0" + } + } + }, + "constructInfo": { + "fqn": "aws-cdk-lib.aws_config.CustomPolicy", + "version": "2.78.0" + } + }, + "API_GATEWAY_PRIVATERemediationConfiguration": { + "id": "API_GATEWAY_PRIVATERemediationConfiguration", + "path": "CdkStack/API_GATEWAY_PRIVATERemediationConfiguration", + "attributes": { + "aws:cdk:cloudformation:type": "AWS::Config::RemediationConfiguration", + "aws:cdk:cloudformation:props": { + "configRuleName": "API_GATEWAY_PRIVATE", + "targetId": "AWS-PublishSNSNotification", + "targetType": "SSM_DOCUMENT", + "automatic": true, + "maximumAutomaticAttempts": 2, + "parameters": { + "AutomationAssumeRole": { + "StaticValue": { + "Values": [ + { + "Fn::Sub": "arn:aws:iam::${AWS::AccountId}:role/sns-access" + } + ] + } + }, + "Message": { + "StaticValue": { + "Values": [ + "hi" + ] + } + }, + "TopicArn": { + "StaticValue": { + "Values": [ + { + "Fn::Sub": "arn:aws:sns:${AWS::Region}:${AWS::AccountId}:rules-notification" + } + ] + } + } + }, + "resourceType": "AWS::EC2::Instance", + "retryAttemptSeconds": 2, + "targetVersion": "1" + } + }, + "constructInfo": { + "fqn": "aws-cdk-lib.aws_config.CfnRemediationConfiguration", + "version": "2.78.0" + } + }, + "CDKMetadata": { + "id": "CDKMetadata", + "path": "CdkStack/CDKMetadata", + "children": { + "Default": { + "id": "Default", + "path": "CdkStack/CDKMetadata/Default", + "constructInfo": { + "fqn": "aws-cdk-lib.CfnResource", + "version": "2.78.0" + } + }, + "Condition": { + "id": "Condition", + "path": "CdkStack/CDKMetadata/Condition", + "constructInfo": { + "fqn": "aws-cdk-lib.CfnCondition", + "version": "2.78.0" + } + } + }, + "constructInfo": { + "fqn": "constructs.Construct", + "version": "10.2.17" + } + }, + "BootstrapVersion": { + "id": "BootstrapVersion", + "path": "CdkStack/BootstrapVersion", + "constructInfo": { + "fqn": "aws-cdk-lib.CfnParameter", + "version": "2.78.0" + } + }, + "CheckBootstrapVersion": { + "id": "CheckBootstrapVersion", + "path": "CdkStack/CheckBootstrapVersion", + "constructInfo": { + "fqn": "aws-cdk-lib.CfnRule", + "version": "2.78.0" + } + 
} + }, + "constructInfo": { + "fqn": "aws-cdk-lib.Stack", + "version": "2.78.0" + } + }, + "Tree": { + "id": "Tree", + "path": "Tree", + "constructInfo": { + "fqn": "constructs.Construct", + "version": "10.2.17" + } + } + }, + "constructInfo": { + "fqn": "aws-cdk-lib.App", + "version": "2.78.0" + } + } +} \ No newline at end of file diff --git a/rdk/cli/__init__.py b/rdk/cli/__init__.py index e69de29b..15096eb1 100644 --- a/rdk/cli/__init__.py +++ b/rdk/cli/__init__.py @@ -0,0 +1,11 @@ +from rdk.version import __version__ + +# Package metadata +NAME = "rdk_cli" +DIST_NAME = "rdk_cli" +CLI_NAME = "rdk_cli" +VERSION = __version__ +DESCRIPTION = "This package contains the code for RDK's CLI commands." +MAINTAINER = "RDK maintainer" +MAINTAINER_EMAIL = "rdk-maintainers@amazon.com" +URL = "https://github.com/awslabs/aws-config-rdk" diff --git a/rdk/cli/commands/__init__.py b/rdk/cli/commands/__init__.py index e69de29b..ca5226d1 100644 --- a/rdk/cli/commands/__init__.py +++ b/rdk/cli/commands/__init__.py @@ -0,0 +1 @@ +# RDK CLI Commands Package diff --git a/rdk/cli/commands/clean.py b/rdk/cli/commands/clean.py new file mode 100644 index 00000000..5f26b728 --- /dev/null +++ b/rdk/cli/commands/clean.py @@ -0,0 +1,14 @@ +import sys + +# from rdk.core.init import RdkInitializer +from rdk.utils.logger import get_main_logger + + +def run(): + """ + clean sub-command handler. + """ + logger = get_main_logger() + logger.info("AWS Config cleaning is starting ...") + + sys.exit(print("RDK clean")) diff --git a/rdk/cli/commands/create.py b/rdk/cli/commands/create.py new file mode 100644 index 00000000..566f63a8 --- /dev/null +++ b/rdk/cli/commands/create.py @@ -0,0 +1,14 @@ +import sys + +# from rdk.core.init import RdkInitializer +from rdk.utils.logger import get_main_logger + + +def run(): + """ + create sub-command handler. + """ + logger = get_main_logger() + logger.info("AWS Config create is starting ...") + + sys.exit(print("RDK create")) diff --git a/rdk/cli/commands/create_region_set.py b/rdk/cli/commands/create_region_set.py new file mode 100644 index 00000000..e69de29b diff --git a/rdk/cli/commands/create_rule_template.py b/rdk/cli/commands/create_rule_template.py new file mode 100644 index 00000000..9fc49ec4 --- /dev/null +++ b/rdk/cli/commands/create_rule_template.py @@ -0,0 +1,14 @@ +import sys + +# from rdk.core.init import RdkInitializer +from rdk.utils.logger import get_main_logger + + +def run(): + """ + create rule template sub-command handler. + """ + logger = get_main_logger() + logger.info("AWS Config create rule template is starting ...") + + sys.exit(print("RDK creating rule template")) diff --git a/rdk/cli/commands/deploy_organization.py b/rdk/cli/commands/deploy_organization.py new file mode 100644 index 00000000..b726cedc --- /dev/null +++ b/rdk/cli/commands/deploy_organization.py @@ -0,0 +1,14 @@ +import sys + +# from rdk.core.init import RdkInitializer +from rdk.utils.logger import get_main_logger + + +def run(): + """ + deploy-organization sub-command handler. + """ + logger = get_main_logger() + logger.info("AWS Config deploy organization is starting ...") + + sys.exit(print("RDK deploying to organization")) diff --git a/rdk/cli/commands/destroy.py b/rdk/cli/commands/destroy.py index c9fc12e8..ae14b6e4 100644 --- a/rdk/cli/commands/destroy.py +++ b/rdk/cli/commands/destroy.py @@ -5,6 +5,7 @@ from rdk.utils.logger import get_main_logger +# TODO - should this be named undeploy for consistency with RDK v0? 
def run(rulenames: List[str], dryrun: bool): """ test sub-command handler. diff --git a/rdk/cli/commands/export.py b/rdk/cli/commands/export.py new file mode 100644 index 00000000..e69de29b diff --git a/rdk/cli/commands/logs.py b/rdk/cli/commands/logs.py new file mode 100644 index 00000000..e69de29b diff --git a/rdk/cli/commands/modify.py b/rdk/cli/commands/modify.py new file mode 100644 index 00000000..e69de29b diff --git a/rdk/cli/commands/rulesets.py b/rdk/cli/commands/rulesets.py new file mode 100644 index 00000000..e69de29b diff --git a/rdk/cli/commands/sample_ci.py b/rdk/cli/commands/sample_ci.py new file mode 100644 index 00000000..1c21942a --- /dev/null +++ b/rdk/cli/commands/sample_ci.py @@ -0,0 +1,22 @@ +import argparse +import json +import sys + +# from rdk.core.init import RdkInitializer +from rdk.utils.logger import get_main_logger +from rdk.core.get_accepted_resource_types import get_accepted_resource_types +from rdk.core.sample_ci import TestCI + + +def run(resource_type: str): + """ + sample-ci sub-command handler. + """ + logger = get_main_logger() + logger.info("AWS Config sample CI is starting ...") + my_test_ci = TestCI(resource_type) + print(json.dumps(my_test_ci.get_json(), indent=4)) + print( + f"For more info, try checking: https://github.com/awslabs/aws-config-resource-schema/blob/master/config/properties/resource-types/" + ) + sys.exit(0) # TODO - Necessary? diff --git a/rdk/cli/commands/test.py b/rdk/cli/commands/test.py index eef3e225..feb52eb2 100644 --- a/rdk/cli/commands/test.py +++ b/rdk/cli/commands/test.py @@ -5,6 +5,7 @@ from rdk.utils.logger import get_main_logger +# TODO - should this be named test_local for consistency with RDK v0? def run(rulenames: List[str], verbose=False): """ test sub-command handler. diff --git a/rdk/cli/commands/undeploy_organization.py b/rdk/cli/commands/undeploy_organization.py new file mode 100644 index 00000000..e69de29b diff --git a/rdk/cli/main.py b/rdk/cli/main.py index f5a925e3..470a1d46 100644 --- a/rdk/cli/main.py +++ b/rdk/cli/main.py @@ -8,7 +8,9 @@ import rdk.cli.commands.init as init_cmd import rdk.cli.commands.test as test_cmd import rdk.cli.commands.destroy as destroy_cmd +import rdk.cli.commands.sample_ci as sample_ci_cmd import rdk.utils.logger as rdk_logger +from rdk.core.get_accepted_resource_types import get_accepted_resource_types def main(): @@ -132,6 +134,19 @@ def main(): "_pytest", ) + # sample-ci + commands_parser_sample_ci = commands_parser.add_parser( + "sample-ci", + help="Provides a way to see sample configuration items for most supported resource types.", + ) + + commands_parser_sample_ci.add_argument( + "ci_type", + metavar="", + help='Resource name (e.g. 
"AWS::EC2::Instance") to display a sample CI JSON document for.', + choices=get_accepted_resource_types(), + ) + # Parse all args and commands args = main_parser.parse_args() @@ -172,3 +187,9 @@ def main(): rulenames=args.rulename, dryrun=args.dryrun, ) + + # handle: sample-ci + if args.command == "sample-ci": + sample_ci_cmd.run( + resource_type=args.ci_type, + ) diff --git a/rdk/core/get_accepted_resource_types.py b/rdk/core/get_accepted_resource_types.py new file mode 100644 index 00000000..6002a2d7 --- /dev/null +++ b/rdk/core/get_accepted_resource_types.py @@ -0,0 +1,317 @@ +""" +Helper function to return resource types supported by AWS Config + +This need to be update whenever config service supports more resource types: +https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html +""" + + +# TODO - Read this list from a text file (ideally one automatically pulled from AWS docs) +def get_accepted_resource_types(): + return [ + "AWS::ACM::Certificate", + "AWS::AccessAnalyzer::Analyzer", + "AWS::AmazonMQ::Broker", + "AWS::ApiGateway::RestApi", + "AWS::ApiGateway::Stage", + "AWS::ApiGatewayV2::Api", + "AWS::ApiGatewayV2::Stage", + "AWS::AppConfig::Application", + "AWS::AppConfig::ConfigurationProfile", + "AWS::AppConfig::DeploymentStrategy", + "AWS::AppConfig::Environment", + "AWS::AppFlow::Flow", + "AWS::AppStream::DirectoryConfig", + "AWS::AppSync::GraphQLApi", + "AWS::Athena::DataCatalog", + "AWS::Athena::WorkGroup", + "AWS::AuditManager::Assessment", + "AWS::AutoScaling::AutoScalingGroup", + "AWS::AutoScaling::LaunchConfiguration", + "AWS::AutoScaling::ScalingPolicy", + "AWS::AutoScaling::ScheduledAction", + "AWS::AutoScaling::WarmPool", + "AWS::Backup::BackupPlan", + "AWS::Backup::BackupSelection", + "AWS::Backup::BackupVault", + "AWS::Backup::RecoveryPoint", + "AWS::Backup::ReportPlan", + "AWS::Batch::ComputeEnvironment", + "AWS::Batch::JobQueue", + "AWS::Budgets::BudgetsAction", + "AWS::Cloud9::EnvironmentEC2", + "AWS::CloudFormation::Stack", + "AWS::CloudFront::Distribution", + "AWS::CloudFront::StreamingDistribution", + "AWS::CloudTrail::Trail", + "AWS::CloudWatch::Alarm", + "AWS::CloudWatch::MetricStream", + "AWS::CodeBuild::Project", + "AWS::CodeDeploy::Application", + "AWS::CodeDeploy::DeploymentConfig", + "AWS::CodeDeploy::DeploymentGroup", + "AWS::CodeGuruReviewer::RepositoryAssociation", + "AWS::CodePipeline::Pipeline", + "AWS::Config::ConfigurationRecorder", + "AWS::Config::ConformancePackCompliance", + "AWS::Config::ResourceCompliance", + "AWS::Connect::PhoneNumber", + "AWS::CustomerProfiles::Domain", + "AWS::DMS::Certificate", + "AWS::DMS::EventSubscription", + "AWS::DMS::ReplicationInstance", + "AWS::DMS::ReplicationSubnetGroup", + "AWS::DMS::ReplicationTask", + "AWS::DataSync::LocationEFS", + "AWS::DataSync::LocationFSxLustre", + "AWS::DataSync::LocationFSxWindows", + "AWS::DataSync::LocationHDFS", + "AWS::DataSync::LocationNFS", + "AWS::DataSync::LocationObjectStorage", + "AWS::DataSync::LocationS3", + "AWS::DataSync::LocationSMB", + "AWS::DataSync::Task", + "AWS::Detective::Graph", + "AWS::DeviceFarm::InstanceProfile", + "AWS::DeviceFarm::Project", + "AWS::DeviceFarm::TestGridProject", + "AWS::DynamoDB::Table", + "AWS::EC2::CustomerGateway", + "AWS::EC2::DHCPOptions", + "AWS::EC2::EC2Fleet", + "AWS::EC2::EIP", + "AWS::EC2::EgressOnlyInternetGateway", + "AWS::EC2::FlowLog", + "AWS::EC2::Host", + "AWS::EC2::IPAM", + "AWS::EC2::Instance", + "AWS::EC2::InternetGateway", + "AWS::EC2::LaunchTemplate", + "AWS::EC2::NatGateway", + 
"AWS::EC2::NetworkAcl", + "AWS::EC2::NetworkInsightsAccessScopeAnalysis", + "AWS::EC2::NetworkInsightsPath", + "AWS::EC2::NetworkInterface", + "AWS::EC2::RegisteredHAInstance", + "AWS::EC2::RouteTable", + "AWS::EC2::SecurityGroup", + "AWS::EC2::Subnet", + "AWS::EC2::SubnetRouteTableAssociation", + "AWS::EC2::TrafficMirrorFilter", + "AWS::EC2::TrafficMirrorSession", + "AWS::EC2::TrafficMirrorTarget", + "AWS::EC2::TransitGateway", + "AWS::EC2::TransitGatewayAttachment", + "AWS::EC2::TransitGatewayRouteTable", + "AWS::EC2::VPC", + "AWS::EC2::VPCEndpoint", + "AWS::EC2::VPCEndpointService", + "AWS::EC2::VPCPeeringConnection", + "AWS::EC2::VPNConnection", + "AWS::EC2::VPNGateway", + "AWS::EC2::Volume", + "AWS::ECR::PublicRepository", + "AWS::ECR::PullThroughCacheRule", + "AWS::ECR::RegistryPolicy", + "AWS::ECR::Repository", + "AWS::ECS::Cluster", + "AWS::ECS::Service", + "AWS::ECS::TaskDefinition", + "AWS::EFS::AccessPoint", + "AWS::EFS::FileSystem", + "AWS::EKS::Addon", + "AWS::EKS::Cluster", + "AWS::EKS::FargateProfile", + "AWS::EKS::IdentityProviderConfig", + "AWS::EMR::SecurityConfiguration", + "AWS::ElasticBeanstalk::Application", + "AWS::ElasticBeanstalk::ApplicationVersion", + "AWS::ElasticBeanstalk::Environment", + "AWS::ElasticLoadBalancing::LoadBalancer", + "AWS::ElasticLoadBalancingV2::Listener", + "AWS::ElasticLoadBalancingV2::LoadBalancer", + "AWS::ElasticSearch::Domain", + "AWS::EventSchemas::Discoverer", + "AWS::EventSchemas::Registry", + "AWS::EventSchemas::RegistryPolicy", + "AWS::EventSchemas::Schema", + "AWS::Events::ApiDestination", + "AWS::Events::Archive", + "AWS::Events::Connection", + "AWS::Events::Endpoint", + "AWS::Events::EventBus", + "AWS::Events::Rule", + "AWS::FIS::ExperimentTemplate", + "AWS::FraudDetector::EntityType", + "AWS::FraudDetector::Label", + "AWS::FraudDetector::Outcome", + "AWS::FraudDetector::Variable", + "AWS::GlobalAccelerator::Accelerator", + "AWS::GlobalAccelerator::EndpointGroup", + "AWS::GlobalAccelerator::Listener", + "AWS::Glue::Classifier", + "AWS::Glue::Job", + "AWS::Glue::MLTransform", + "AWS::GroundStation::Config", + "AWS::GuardDuty::Detector", + "AWS::GuardDuty::Filter", + "AWS::GuardDuty::IPSet", + "AWS::GuardDuty::ThreatIntelSet", + "AWS::HealthLake::FHIRDatastore", + "AWS::IAM::Group", + "AWS::IAM::Policy", + "AWS::IAM::Role", + "AWS::IAM::User", + "AWS::IVS::Channel", + "AWS::IVS::PlaybackKeyPair", + "AWS::IVS::RecordingConfiguration", + "AWS::ImageBuilder::ContainerRecipe", + "AWS::ImageBuilder::DistributionConfiguration", + "AWS::ImageBuilder::ImagePipeline", + "AWS::ImageBuilder::InfrastructureConfiguration", + "AWS::IoT::AccountAuditConfiguration", + "AWS::IoT::Authorizer", + "AWS::IoT::CustomMetric", + "AWS::IoT::Dimension", + "AWS::IoT::FleetMetric", + "AWS::IoT::MitigationAction", + "AWS::IoT::Policy", + "AWS::IoT::RoleAlias", + "AWS::IoT::ScheduledAudit", + "AWS::IoT::SecurityProfile", + "AWS::IoTAnalytics::Channel", + "AWS::IoTAnalytics::Dataset", + "AWS::IoTAnalytics::Datastore", + "AWS::IoTAnalytics::Pipeline", + "AWS::IoTEvents::AlarmModel", + "AWS::IoTEvents::DetectorModel", + "AWS::IoTEvents::Input", + "AWS::IoTSiteWise::AssetModel", + "AWS::IoTSiteWise::Dashboard", + "AWS::IoTSiteWise::Gateway", + "AWS::IoTSiteWise::Portal", + "AWS::IoTSiteWise::Project", + "AWS::IoTTwinMaker::Entity", + "AWS::IoTTwinMaker::Scene", + "AWS::IoTTwinMaker::Workspace", + "AWS::IoTWireless::ServiceProfile", + "AWS::KMS::Alias", + "AWS::KMS::Key", + "AWS::Kinesis::Stream", + "AWS::Kinesis::StreamConsumer", + 
"AWS::KinesisAnalyticsV2::Application", + "AWS::KinesisVideo::SignalingChannel", + "AWS::Lambda::Function", + "AWS::Lex::Bot", + "AWS::Lex::BotAlias", + "AWS::Lightsail::Bucket", + "AWS::Lightsail::Certificate", + "AWS::Lightsail::Disk", + "AWS::Lightsail::StaticIp", + "AWS::LookoutMetrics::Alert", + "AWS::LookoutVision::Project", + "AWS::MSK::Cluster", + "AWS::MediaPackage::PackagingConfiguration", + "AWS::MediaPackage::PackagingGroup", + "AWS::NetworkFirewall::Firewall", + "AWS::NetworkFirewall::FirewallPolicy", + "AWS::NetworkFirewall::RuleGroup", + "AWS::NetworkFirewall::TLSInspectionConfiguration", + "AWS::NetworkManager::Device", + "AWS::NetworkManager::GlobalNetwork", + "AWS::NetworkManager::Link", + "AWS::NetworkManager::Site", + "AWS::NetworkManager::TransitGatewayRegistration", + "AWS::OpenSearch::Domain", + "AWS::Panorama::Package", + "AWS::Pinpoint::App", + "AWS::Pinpoint::ApplicationSettings", + "AWS::Pinpoint::Segment", + "AWS::QLDB::Ledger", + "AWS::RDS::DBCluster", + "AWS::RDS::DBClusterSnapshot", + "AWS::RDS::DBInstance", + "AWS::RDS::DBSecurityGroup", + "AWS::RDS::DBSnapshot", + "AWS::RDS::DBSubnetGroup", + "AWS::RDS::EventSubscription", + "AWS::RDS::GlobalCluster", + "AWS::RUM::AppMonitor", + "AWS::Redshift::Cluster", + "AWS::Redshift::ClusterParameterGroup", + "AWS::Redshift::ClusterSecurityGroup", + "AWS::Redshift::ClusterSnapshot", + "AWS::Redshift::ClusterSubnetGroup", + "AWS::Redshift::EventSubscription", + "AWS::Redshift::ScheduledAction", + "AWS::ResilienceHub::ResiliencyPolicy", + "AWS::RoboMaker::RobotApplication", + "AWS::RoboMaker::RobotApplicationVersion", + "AWS::RoboMaker::SimulationApplication", + "AWS::Route53::HealthCheck", + "AWS::Route53::HostedZone", + "AWS::Route53RecoveryControl::Cluster", + "AWS::Route53RecoveryControl::ControlPanel", + "AWS::Route53RecoveryControl::RoutingControl", + "AWS::Route53RecoveryControl::SafetyRule", + "AWS::Route53RecoveryReadiness::Cell", + "AWS::Route53RecoveryReadiness::ReadinessCheck", + "AWS::Route53RecoveryReadiness::RecoveryGroup", + "AWS::Route53RecoveryReadiness::ResourceSet", + "AWS::Route53Resolver::FirewallDomainList", + "AWS::Route53Resolver::FirewallRuleGroupAssociation", + "AWS::Route53Resolver::ResolverEndpoint", + "AWS::Route53Resolver::ResolverRule", + "AWS::Route53Resolver::ResolverRuleAssociation", + "AWS::S3::AccountPublicAccessBlock", + "AWS::S3::Bucket", + "AWS::S3::MultiRegionAccessPoint", + "AWS::S3::StorageLens", + "AWS::SES::ConfigurationSet", + "AWS::SES::ContactList", + "AWS::SES::ReceiptFilter", + "AWS::SES::ReceiptRuleSet", + "AWS::SES::Template", + "AWS::SNS::Topic", + "AWS::SQS::Queue", + "AWS::SSM::AssociationCompliance", + "AWS::SSM::FileData", + "AWS::SSM::ManagedInstanceInventory", + "AWS::SSM::PatchCompliance", + "AWS::SageMaker::AppImageConfig", + "AWS::SageMaker::CodeRepository", + "AWS::SageMaker::EndpointConfig", + "AWS::SageMaker::Image", + "AWS::SageMaker::Model", + "AWS::SageMaker::NotebookInstance", + "AWS::SageMaker::NotebookInstanceLifecycleConfig", + "AWS::SageMaker::Workteam", + "AWS::SecretsManager::Secret", + "AWS::ServiceCatalog::CloudFormationProduct", + "AWS::ServiceCatalog::CloudFormationProvisionedProduct", + "AWS::ServiceCatalog::Portfolio", + "AWS::ServiceDiscovery::HttpNamespace", + "AWS::ServiceDiscovery::PublicDnsNamespace", + "AWS::ServiceDiscovery::Service", + "AWS::Shield::Protection", + "AWS::ShieldRegional::Protection", + "AWS::StepFunctions::Activity", + "AWS::StepFunctions::StateMachine", + "AWS::Transfer::Workflow", + "AWS::WAF::RateBasedRule", + 
"AWS::WAF::Rule", + "AWS::WAF::RuleGroup", + "AWS::WAF::WebACL", + "AWS::WAFRegional::RateBasedRule", + "AWS::WAFRegional::Rule", + "AWS::WAFRegional::RuleGroup", + "AWS::WAFRegional::WebACL", + "AWS::WAFv2::IPSet", + "AWS::WAFv2::ManagedRuleSet", + "AWS::WAFv2::RegexPatternSet", + "AWS::WAFv2::RuleGroup", + "AWS::WAFv2::WebACL", + "AWS::WorkSpaces::ConnectionAlias", + "AWS::WorkSpaces::Workspace", + "AWS::XRay::EncryptionConfig", + ] diff --git a/rdk/core/sample_ci.py b/rdk/core/sample_ci.py new file mode 100644 index 00000000..f5ff1e47 --- /dev/null +++ b/rdk/core/sample_ci.py @@ -0,0 +1,37 @@ +import json +import os + +TEMPLATE_DIR = "templates" +SAMPLE_CI_DIR = "ci_examples" + +""" +SUMMARY +This class takes the name of a CloudFormation resource and loads an example JSON representing its CI. +""" + + +class TestCI: + def __init__(self, ci_type): + # convert ci_type string to filename format + ci_file = ci_type.replace("::", "_") + ".json" + try: + self.ci_json = json.load( + open( + os.path.join( + os.path.dirname(__file__), TEMPLATE_DIR, SAMPLE_CI_DIR, ci_file + ), + "r", + ) + ) + except FileNotFoundError: + resource_url = "https://github.com/awslabs/aws-config-resource-schema/blob/master/config/properties/resource-types/" + print( + "No sample CI found for " + + ci_type + + ", even though it appears to be a supported CI. Please log an issue at https://github.com/awslabs/aws-config-rdk." + + f"\nLook here: {resource_url} for additional info" + ) + exit(1) # TODO - Is this too aggressive? + + def get_json(self): + return self.ci_json diff --git a/rdk/core/templates/ci_examples/AWS_S3_Bucket.json b/rdk/core/templates/ci_examples/AWS_S3_Bucket.json new file mode 100644 index 00000000..6650f50a --- /dev/null +++ b/rdk/core/templates/ci_examples/AWS_S3_Bucket.json @@ -0,0 +1,47 @@ +{ + "relatedEvents": [], + "relationships": [], + "configuration": { + "name": "mborch-test-bucket-config-item", + "owner": { + "displayName": null, + "id": "8ac813c84b8ad4d9f3e35f80aaeb254f85922a902ce87ed6b49ccbfa188f5a12" + }, + "creationDate": "2019-02-12T07:21:45.000Z" + }, + "supplementaryConfiguration": { + "AccessControlList": "{\"grantSet\":null,\"grantList\":[{\"grantee\":{\"id\":\"8ac813c84b8ad4d9f3e35f80aaeb254f85922a902ce87ed6b49ccbfa188f5a12\",\"displayName\":null},\"permission\":\"FullControl\"}],\"owner\":{\"displayName\":null,\"id\":\"8ac813c84b8ad4d9f3e35f80aaeb254f85922a902ce87ed6b49ccbfa188f5a12\"},\"isRequesterCharged\":false}", + "BucketAccelerateConfiguration": { + "status": null + }, + "BucketLoggingConfiguration": { + "destinationBucketName": null, + "logFilePrefix": null + }, + "BucketNotificationConfiguration": { + "configurations": {} + }, + "BucketPolicy": { + "policyText": "{\"Version\":\"2012-10-17\",\"Id\":\"Policy1478390053757\",\"Statement\":[{\"Sid\":\"Stmt1478389920384\",\"Effect\":\"Deny\",\"Principal\":\"*\",\"Action\":\"s3:*\",\"Resource\":\"arn:aws:s3:::mborch-test-bucket-config-item\",\"Condition\":{\"Bool\":{\"aws:SecureTransport\":\"false\"}}}]}" + }, + "BucketVersioningConfiguration": { + "status": "Enabled", + "isMfaDeleteEnabled": null + }, + "IsRequesterPaysEnabled": false + }, + "tags": {}, + "configurationItemVersion": "1.3", + "configurationItemCaptureTime": "2019-02-12T07:29:16.288Z", + "configurationStateId": 1549956556288, + "awsAccountId": "934654633380", + "configurationItemStatus": "OK", + "resourceType": "AWS::S3::Bucket", + "resourceId": "mborch-test-bucket-config-item", + "resourceName": "mborch-test-bucket-config-item", + "ARN": 
"arn:aws:s3:::mborch-test-bucket-config-item", + "awsRegion": "us-east-1", + "availabilityZone": "Regional", + "configurationStateMd5Hash": "", + "resourceCreationTime": "2019-02-12T07:21:45.000Z" + } \ No newline at end of file diff --git a/rdk/frameworks/cdk/app.py b/rdk/frameworks/cdk/app.py index 2859800d..26e23e65 100644 --- a/rdk/frameworks/cdk/app.py +++ b/rdk/frameworks/cdk/app.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python import os import aws_cdk as cdk diff --git a/rdk/frameworks/cdk/cdk/core/rule_parameters.py b/rdk/frameworks/cdk/cdk/core/rule_parameters.py index faafd148..5f843ddc 100644 --- a/rdk/frameworks/cdk/cdk/core/rule_parameters.py +++ b/rdk/frameworks/cdk/cdk/core/rule_parameters.py @@ -20,8 +20,6 @@ "python3.9-lib", "python3.10", "python3.10-lib", - # "nodejs6.10", - # "nodejs8.10", ] @@ -49,7 +47,7 @@ def get_rule_name(rule_path: Path): ) if len(rule_name) > 128: raise RdkParametersInvalidError( - "Error: Found Rule with name over 128 characters: {rule_name} \n Recreate the Rule with a shorter name." + f"Error: Found Rule with name over 128 characters: {rule_name} \n Recreate the Rule with a shorter name." ) return rule_name diff --git a/rdk/runners/base.py b/rdk/runners/base.py index 06537063..db6bdcf3 100644 --- a/rdk/runners/base.py +++ b/rdk/runners/base.py @@ -81,6 +81,7 @@ def run_cmd( # pylint: disable=too-many-arguments,too-many-locals,too-many-stat "stderr": subprocess.PIPE, # We're only dealing with text streams for now "universal_newlines": True, + "shell": True, # Added to make this work for Windows environments } if cwd: subprocess_popen_kwargs["cwd"] = cwd @@ -125,6 +126,13 @@ def _log_streams(is_final: bool = False): """ Log stuff based on stdout or stderr. """ + # Log streaming currently fails for Windows + # OSError: [WinError 10038] An operation was attempted on something that is not a socket + # Capture the error for now. + try: + selctr.select() + except Exception as ex: + return # TODO - actually implement this for Windows for _selkey, _ in selctr.select(): # NOTE: Selector key can be empty if _selkey: @@ -181,6 +189,7 @@ def _log_streams(is_final: bool = False): raise RdkCommandInvokeError("Failed to invoke requested command") from exc if return_code not in allowed_return_codes: + self.logger.info(f"Return code was {return_code}") # log any errors for _line in captured_stderr_lines: self.logger.error(_line) diff --git a/rdk/runners/cdk.py b/rdk/runners/cdk.py index 1e15ef29..b481f310 100644 --- a/rdk/runners/cdk.py +++ b/rdk/runners/cdk.py @@ -34,6 +34,7 @@ def __post_init__(self): # shutil.rmtree(self.root_module / "cdk") # shutil.copytree(Path(__file__).resolve().parent.parent /'frameworks' / 'cdk', self.root_module / 'cdk') # self.cdk_app_dir = self.root_module / "cdk" + # TODO - should this actually be the CDK application's path? I don't understand what Ricky was doing here. self.cdk_app_dir = Path(__file__).resolve().parent.parent / "frameworks" / "cdk" def synthesize(self): @@ -74,9 +75,20 @@ def diff(self): Parameters: """ - cmd = ["cdk", "diff", "--context", "rules_dir=" + self.rules_dir.as_posix()] + cmd = [ + "cdk", + "diff", + "--context", + "rules_dir=" + self.rules_dir.as_posix(), + ] + + self.logger.info( + f"Showing differences on CloudFormation template(s) for rule {self.rules_dir.as_posix()}..." + ) - self.logger.info("Showing differences on CloudFormation template(s)...") + self.logger.info( + f"Running cmd {cmd} in directory {self.cdk_app_dir.as_posix()}..." 
+ ) self.run_cmd( cmd=cmd, @@ -98,7 +110,7 @@ def bootstrap(self): "rules_dir=" + self.rules_dir.as_posix(), ] - self.logger.info("Envrionment Bootstrapping ...") + self.logger.info("Environment Bootstrapping ...") self.run_cmd( cmd=cmd, @@ -116,6 +128,8 @@ def deploy(self): cmd = [ "cdk", "deploy", + "--app", + (self.cdk_app_dir / "cdk.out").as_posix(), "--context", "rules_dir=" + self.rules_dir.as_posix(), "--require-approval", diff --git a/tox.ini b/tox.ini new file mode 100644 index 00000000..66a14670 --- /dev/null +++ b/tox.ini @@ -0,0 +1,2 @@ +[flake8] +max-line-length=140 \ No newline at end of file From 55dba3be65a60471da2b14fd8b57e270ecd5abb6 Mon Sep 17 00:00:00 2001 From: Benjamin Morris Date: Mon, 8 May 2023 18:27:49 -0700 Subject: [PATCH 14/23] gitignore update --- .gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 08ee0e3e..35ed8589 100644 --- a/.gitignore +++ b/.gitignore @@ -369,4 +369,5 @@ docs/CHANGELOG.md # CDK -**/cdk.out/ \ No newline at end of file +**/cdk.out/ +cdk.out/ \ No newline at end of file From 89c3d6238c5616cc2420bbffe0eef82d0e120a4a Mon Sep 17 00:00:00 2001 From: Benjamin Morris <93620006+bmorrissirromb@users.noreply.github.com> Date: Mon, 8 May 2023 18:28:12 -0700 Subject: [PATCH 15/23] Delete cdk.out directory --- cdk.out/CdkStack.assets.json | 19 -- cdk.out/CdkStack.template.json | 314 --------------------------------- cdk.out/cdk.out | 1 - cdk.out/manifest.json | 167 ------------------ cdk.out/tree.json | 165 ----------------- 5 files changed, 666 deletions(-) delete mode 100644 cdk.out/CdkStack.assets.json delete mode 100644 cdk.out/CdkStack.template.json delete mode 100644 cdk.out/cdk.out delete mode 100644 cdk.out/manifest.json delete mode 100644 cdk.out/tree.json diff --git a/cdk.out/CdkStack.assets.json b/cdk.out/CdkStack.assets.json deleted file mode 100644 index 0f012846..00000000 --- a/cdk.out/CdkStack.assets.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "version": "31.0.0", - "files": { - "1f66503cc1455d71cbdf4d10ceddc706d2016dc01b701a1e11d88204bc108317": { - "source": { - "path": "CdkStack.template.json", - "packaging": "file" - }, - "destinations": { - "current_account-current_region": { - "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", - "objectKey": "1f66503cc1455d71cbdf4d10ceddc706d2016dc01b701a1e11d88204bc108317.json", - "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" - } - } - } - }, - "dockerImages": {} -} \ No newline at end of file diff --git a/cdk.out/CdkStack.template.json b/cdk.out/CdkStack.template.json deleted file mode 100644 index 204a2811..00000000 --- a/cdk.out/CdkStack.template.json +++ /dev/null @@ -1,314 +0,0 @@ -{ - "Resources": { - "APIGATEWAYPRIVATEC8B60F10": { - "Type": "AWS::Config::ConfigRule", - "Properties": { - "Source": { - "CustomPolicyDetails": { - "EnableDebugLogDelivery": false, - "PolicyRuntime": "guard-2.x.x", - "PolicyText": "#\n# Select all AWS::ApiGateway::RestApi resources\n# present in the Resources section of the template. \n#\nlet api_gws = Resources.*[ Type == 'AWS::ApiGateway::RestApi']\n\n#\n# Rule intent: \n# 1) All AWS::ApiGateway::RestApi resources deployed must be private. 
\n# 2) All AWS::ApiGateway::RestApi resources deployed must have at least one AWS Identity and Access Management (IAM) policy condition key to allow access from a VPC.\n#\n# Expectations: \n# 1) SKIP when there are no AWS::ApiGateway::RestApi resources in the template. \n# 2) PASS when:\n# ALL AWS::ApiGateway::RestApi resources in the template have the EndpointConfiguration property set to Type: PRIVATE. \n# ALL AWS::ApiGateway::RestApi resources in the template have one IAM condition key specified in the Policy property with aws:sourceVpc or :SourceVpc. \n# 3) FAIL otherwise. \n#\n#\n\nrule check_rest_api_is_private when %api_gws !empty { \n %api_gws {\n Properties.EndpointConfiguration.Types[*] == \"PRIVATE\" \n } \n} \n\nrule check_rest_api_has_vpc_access when check_rest_api_is_private {\n %api_gws {\n Properties {\n #\n # ALL AWS::ApiGateway::RestApi resources in the template have one IAM condition key specified in the Policy property with \n # aws:sourceVpc or :SourceVpc\n # \n some Policy.Statement[*] {\n Condition.*[ keys == /aws:[sS]ource(Vpc|VPC|Vpce|VPCE)/ ] !empty\n }\n }\n }\n}" - }, - "Owner": "CUSTOM_POLICY", - "SourceDetails": [ - { - "EventSource": "aws.config", - "MessageType": "ConfigurationItemChangeNotification" - }, - { - "EventSource": "aws.config", - "MessageType": "OversizedConfigurationItemChangeNotification" - } - ] - }, - "ConfigRuleName": "API_GATEWAY_PRIVATE", - "Description": "API_GATEWAY_PRIVATE", - "InputParameters": {} - }, - "Metadata": { - "aws:cdk:path": "CdkStack/API_GATEWAY_PRIVATE/Resource" - } - }, - "APIGATEWAYPRIVATERemediationConfiguration": { - "Type": "AWS::Config::RemediationConfiguration", - "Properties": { - "ConfigRuleName": "API_GATEWAY_PRIVATE", - "TargetId": "AWS-PublishSNSNotification", - "TargetType": "SSM_DOCUMENT", - "Automatic": true, - "MaximumAutomaticAttempts": 2, - "Parameters": { - "AutomationAssumeRole": { - "StaticValue": { - "Values": [ - { - "Fn::Sub": "arn:aws:iam::${AWS::AccountId}:role/sns-access" - } - ] - } - }, - "Message": { - "StaticValue": { - "Values": [ - "hi" - ] - } - }, - "TopicArn": { - "StaticValue": { - "Values": [ - { - "Fn::Sub": "arn:aws:sns:${AWS::Region}:${AWS::AccountId}:rules-notification" - } - ] - } - } - }, - "ResourceType": "AWS::EC2::Instance", - "RetryAttemptSeconds": 2, - "TargetVersion": "1" - }, - "Metadata": { - "aws:cdk:path": "CdkStack/API_GATEWAY_PRIVATERemediationConfiguration" - } - }, - "CDKMetadata": { - "Type": "AWS::CDK::Metadata", - "Properties": { - "Analytics": "v2:deflate64:H4sIAAAAAAAA/y2LQQ7CIBBFz9J9Ga1d6J4LNHgAg0B1LDAJDDFN07tXS1c/7738C1xvcG70NwtjJ+HxCcudtZnan3oYiiO+YJElM4WBPJq5lWOUu1fFuz8pF5xFzUhHKGmHtcZMJRl33CzWMsz8pnjqoeugbz4ZUaQSGYMDVXcDBAyx15oAAAA=" - }, - "Metadata": { - "aws:cdk:path": "CdkStack/CDKMetadata/Default" - }, - "Condition": "CDKMetadataAvailable" - } - }, - "Conditions": { - "CDKMetadataAvailable": { - "Fn::Or": [ - { - "Fn::Or": [ - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "af-south-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "ap-east-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "ap-northeast-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "ap-northeast-2" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "ap-south-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "ap-southeast-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "ap-southeast-2" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "ca-central-1" - ] - 
}, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "cn-north-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "cn-northwest-1" - ] - } - ] - }, - { - "Fn::Or": [ - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "eu-central-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "eu-north-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "eu-south-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "eu-west-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "eu-west-2" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "eu-west-3" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "me-south-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "sa-east-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "us-east-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "us-east-2" - ] - } - ] - }, - { - "Fn::Or": [ - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "us-west-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "us-west-2" - ] - } - ] - } - ] - } - }, - "Parameters": { - "BootstrapVersion": { - "Type": "AWS::SSM::Parameter::Value", - "Default": "/cdk-bootstrap/hnb659fds/version", - "Description": "Version of the CDK Bootstrap resources in this environment, automatically retrieved from SSM Parameter Store. [cdk:skip]" - } - }, - "Rules": { - "CheckBootstrapVersion": { - "Assertions": [ - { - "Assert": { - "Fn::Not": [ - { - "Fn::Contains": [ - [ - "1", - "2", - "3", - "4", - "5" - ], - { - "Ref": "BootstrapVersion" - } - ] - } - ] - }, - "AssertDescription": "CDK bootstrap stack version 6 required. Please run 'cdk bootstrap' with a recent version of the CDK CLI." 
- } - ] - } - } -} \ No newline at end of file diff --git a/cdk.out/cdk.out b/cdk.out/cdk.out deleted file mode 100644 index 7925065e..00000000 --- a/cdk.out/cdk.out +++ /dev/null @@ -1 +0,0 @@ -{"version":"31.0.0"} \ No newline at end of file diff --git a/cdk.out/manifest.json b/cdk.out/manifest.json deleted file mode 100644 index f8a596d9..00000000 --- a/cdk.out/manifest.json +++ /dev/null @@ -1,167 +0,0 @@ -{ - "version": "31.0.0", - "artifacts": { - "CdkStack.assets": { - "type": "cdk:asset-manifest", - "properties": { - "file": "CdkStack.assets.json", - "requiresBootstrapStackVersion": 6, - "bootstrapStackVersionSsmParameter": "/cdk-bootstrap/hnb659fds/version" - } - }, - "CdkStack": { - "type": "aws:cloudformation:stack", - "environment": "aws://unknown-account/unknown-region", - "properties": { - "templateFile": "CdkStack.template.json", - "validateOnSynth": false, - "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-${AWS::Region}", - "cloudFormationExecutionRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-cfn-exec-role-${AWS::AccountId}-${AWS::Region}", - "stackTemplateAssetObjectUrl": "s3://cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}/1f66503cc1455d71cbdf4d10ceddc706d2016dc01b701a1e11d88204bc108317.json", - "requiresBootstrapStackVersion": 6, - "bootstrapStackVersionSsmParameter": "/cdk-bootstrap/hnb659fds/version", - "additionalDependencies": [ - "CdkStack.assets" - ], - "lookupRole": { - "arn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-lookup-role-${AWS::AccountId}-${AWS::Region}", - "requiresBootstrapStackVersion": 8, - "bootstrapStackVersionSsmParameter": "/cdk-bootstrap/hnb659fds/version" - } - }, - "dependencies": [ - "CdkStack.assets" - ], - "metadata": { - "/CdkStack/API_GATEWAY_PRIVATE/Resource": [ - { - "type": "aws:cdk:logicalId", - "data": "APIGATEWAYPRIVATEC8B60F10", - "trace": [ - "new CustomPolicy (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\aws-config\\lib\\rule.js:1:7302)", - "Kernel._create (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9964:29)", - "Kernel.create (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9693:29)", - "KernelHost.processRequest (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", - "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", - "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", - "process.processImmediate (node:internal/timers:471:21)" - ] - } - ], - "/CdkStack/API_GATEWAY_PRIVATERemediationConfiguration": [ - { - "type": "aws:cdk:logicalId", - "data": "APIGATEWAYPRIVATERemediationConfiguration", - "trace": [ - "Kernel._create (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9964:29)", - "Kernel.create (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9693:29)", - "KernelHost.processRequest (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", - "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", - "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", - "process.processImmediate (node:internal/timers:471:21)" - ] - } - ], - "/CdkStack/CDKMetadata/Default": [ - { - "type": "aws:cdk:logicalId", - "data": "CDKMetadata", 
- "trace": [ - "new MetadataResource (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\metadata-resource.js:1:707)", - "C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:2847", - "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:368)", - "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:330)", - "injectMetadataResources (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:2662)", - "synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:1:864)", - "App.synth (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stage.js:1:2052)", - "C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:105", - "Kernel._ensureSync (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:10358:28)", - "Kernel.invoke (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:34)", - "KernelHost.processRequest (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", - "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", - "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", - "process.processImmediate (node:internal/timers:471:21)" - ] - } - ], - "/CdkStack/CDKMetadata/Condition": [ - { - "type": "aws:cdk:logicalId", - "data": "CDKMetadataAvailable", - "trace": [ - "new MetadataResource (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\metadata-resource.js:1:966)", - "C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:2847", - "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:368)", - "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:330)", - "injectMetadataResources (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:2662)", - "synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:1:864)", - "App.synth (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stage.js:1:2052)", - "C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:105", - "Kernel._ensureSync (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:10358:28)", - "Kernel.invoke (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:34)", - "KernelHost.processRequest (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", - "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", - "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", - "process.processImmediate (node:internal/timers:471:21)" - ] - } - ], - 
"/CdkStack/BootstrapVersion": [ - { - "type": "aws:cdk:logicalId", - "data": "BootstrapVersion", - "trace": [ - "addBootstrapVersionRule (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\stack-synthesizer.js:1:5072)", - "DefaultStackSynthesizer.addBootstrapVersionRule (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\stack-synthesizer.js:1:2792)", - "DefaultStackSynthesizer.synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\default-synthesizer.js:1:5963)", - "C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:3375", - "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:368)", - "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:330)", - "synthesizeTree (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:3219)", - "synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:1:1083)", - "App.synth (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stage.js:1:2052)", - "C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:105", - "Kernel._ensureSync (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:10358:28)", - "Kernel.invoke (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:34)", - "KernelHost.processRequest (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", - "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", - "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", - "process.processImmediate (node:internal/timers:471:21)" - ] - } - ], - "/CdkStack/CheckBootstrapVersion": [ - { - "type": "aws:cdk:logicalId", - "data": "CheckBootstrapVersion", - "trace": [ - "addBootstrapVersionRule (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\stack-synthesizer.js:1:5425)", - "DefaultStackSynthesizer.addBootstrapVersionRule (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\stack-synthesizer.js:1:2792)", - "DefaultStackSynthesizer.synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\default-synthesizer.js:1:5963)", - "C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:3375", - "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:368)", - "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:330)", - "synthesizeTree (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:3219)", - "synthesize 
(C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:1:1083)", - "App.synth (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stage.js:1:2052)", - "C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:105", - "Kernel._ensureSync (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:10358:28)", - "Kernel.invoke (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:34)", - "KernelHost.processRequest (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", - "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", - "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", - "process.processImmediate (node:internal/timers:471:21)" - ] - } - ] - }, - "displayName": "CdkStack" - }, - "Tree": { - "type": "cdk:tree", - "properties": { - "file": "tree.json" - } - } - } -} \ No newline at end of file diff --git a/cdk.out/tree.json b/cdk.out/tree.json deleted file mode 100644 index 34a9965a..00000000 --- a/cdk.out/tree.json +++ /dev/null @@ -1,165 +0,0 @@ -{ - "version": "tree-0.1", - "tree": { - "id": "App", - "path": "", - "children": { - "CdkStack": { - "id": "CdkStack", - "path": "CdkStack", - "children": { - "API_GATEWAY_PRIVATE": { - "id": "API_GATEWAY_PRIVATE", - "path": "CdkStack/API_GATEWAY_PRIVATE", - "children": { - "Resource": { - "id": "Resource", - "path": "CdkStack/API_GATEWAY_PRIVATE/Resource", - "attributes": { - "aws:cdk:cloudformation:type": "AWS::Config::ConfigRule", - "aws:cdk:cloudformation:props": { - "source": { - "owner": "CUSTOM_POLICY", - "sourceDetails": [ - { - "eventSource": "aws.config", - "messageType": "ConfigurationItemChangeNotification" - }, - { - "eventSource": "aws.config", - "messageType": "OversizedConfigurationItemChangeNotification" - } - ], - "customPolicyDetails": { - "enableDebugLogDelivery": false, - "policyRuntime": "guard-2.x.x", - "policyText": "#\n# Select all AWS::ApiGateway::RestApi resources\n# present in the Resources section of the template. \n#\nlet api_gws = Resources.*[ Type == 'AWS::ApiGateway::RestApi']\n\n#\n# Rule intent: \n# 1) All AWS::ApiGateway::RestApi resources deployed must be private. \n# 2) All AWS::ApiGateway::RestApi resources deployed must have at least one AWS Identity and Access Management (IAM) policy condition key to allow access from a VPC.\n#\n# Expectations: \n# 1) SKIP when there are no AWS::ApiGateway::RestApi resources in the template. \n# 2) PASS when:\n# ALL AWS::ApiGateway::RestApi resources in the template have the EndpointConfiguration property set to Type: PRIVATE. \n# ALL AWS::ApiGateway::RestApi resources in the template have one IAM condition key specified in the Policy property with aws:sourceVpc or :SourceVpc. \n# 3) FAIL otherwise. 
\n#\n#\n\nrule check_rest_api_is_private when %api_gws !empty { \n %api_gws {\n Properties.EndpointConfiguration.Types[*] == \"PRIVATE\" \n } \n} \n\nrule check_rest_api_has_vpc_access when check_rest_api_is_private {\n %api_gws {\n Properties {\n #\n # ALL AWS::ApiGateway::RestApi resources in the template have one IAM condition key specified in the Policy property with \n # aws:sourceVpc or :SourceVpc\n # \n some Policy.Statement[*] {\n Condition.*[ keys == /aws:[sS]ource(Vpc|VPC|Vpce|VPCE)/ ] !empty\n }\n }\n }\n}" - } - }, - "configRuleName": "API_GATEWAY_PRIVATE", - "description": "API_GATEWAY_PRIVATE", - "inputParameters": {} - } - }, - "constructInfo": { - "fqn": "aws-cdk-lib.aws_config.CfnConfigRule", - "version": "2.78.0" - } - } - }, - "constructInfo": { - "fqn": "aws-cdk-lib.aws_config.CustomPolicy", - "version": "2.78.0" - } - }, - "API_GATEWAY_PRIVATERemediationConfiguration": { - "id": "API_GATEWAY_PRIVATERemediationConfiguration", - "path": "CdkStack/API_GATEWAY_PRIVATERemediationConfiguration", - "attributes": { - "aws:cdk:cloudformation:type": "AWS::Config::RemediationConfiguration", - "aws:cdk:cloudformation:props": { - "configRuleName": "API_GATEWAY_PRIVATE", - "targetId": "AWS-PublishSNSNotification", - "targetType": "SSM_DOCUMENT", - "automatic": true, - "maximumAutomaticAttempts": 2, - "parameters": { - "AutomationAssumeRole": { - "StaticValue": { - "Values": [ - { - "Fn::Sub": "arn:aws:iam::${AWS::AccountId}:role/sns-access" - } - ] - } - }, - "Message": { - "StaticValue": { - "Values": [ - "hi" - ] - } - }, - "TopicArn": { - "StaticValue": { - "Values": [ - { - "Fn::Sub": "arn:aws:sns:${AWS::Region}:${AWS::AccountId}:rules-notification" - } - ] - } - } - }, - "resourceType": "AWS::EC2::Instance", - "retryAttemptSeconds": 2, - "targetVersion": "1" - } - }, - "constructInfo": { - "fqn": "aws-cdk-lib.aws_config.CfnRemediationConfiguration", - "version": "2.78.0" - } - }, - "CDKMetadata": { - "id": "CDKMetadata", - "path": "CdkStack/CDKMetadata", - "children": { - "Default": { - "id": "Default", - "path": "CdkStack/CDKMetadata/Default", - "constructInfo": { - "fqn": "aws-cdk-lib.CfnResource", - "version": "2.78.0" - } - }, - "Condition": { - "id": "Condition", - "path": "CdkStack/CDKMetadata/Condition", - "constructInfo": { - "fqn": "aws-cdk-lib.CfnCondition", - "version": "2.78.0" - } - } - }, - "constructInfo": { - "fqn": "constructs.Construct", - "version": "10.2.17" - } - }, - "BootstrapVersion": { - "id": "BootstrapVersion", - "path": "CdkStack/BootstrapVersion", - "constructInfo": { - "fqn": "aws-cdk-lib.CfnParameter", - "version": "2.78.0" - } - }, - "CheckBootstrapVersion": { - "id": "CheckBootstrapVersion", - "path": "CdkStack/CheckBootstrapVersion", - "constructInfo": { - "fqn": "aws-cdk-lib.CfnRule", - "version": "2.78.0" - } - } - }, - "constructInfo": { - "fqn": "aws-cdk-lib.Stack", - "version": "2.78.0" - } - }, - "Tree": { - "id": "Tree", - "path": "Tree", - "constructInfo": { - "fqn": "constructs.Construct", - "version": "10.2.17" - } - } - }, - "constructInfo": { - "fqn": "aws-cdk-lib.App", - "version": "2.78.0" - } - } -} \ No newline at end of file From fc4f1be8a52cf30659898049ed24648c54be2ae4 Mon Sep 17 00:00:00 2001 From: Benjamin Morris Date: Tue, 9 May 2023 09:17:49 -0700 Subject: [PATCH 16/23] update gitignore --- .gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 35ed8589..daedcabb 100644 --- a/.gitignore +++ b/.gitignore @@ -370,4 +370,5 @@ docs/CHANGELOG.md # CDK **/cdk.out/ -cdk.out/ \ 
No newline at end of file +cdk.out +cdk.out/** \ No newline at end of file From 7a30b3dd96ef0049ad7e52a6be970750eae74f1b Mon Sep 17 00:00:00 2001 From: Benjamin Morris Date: Tue, 9 May 2023 09:20:38 -0700 Subject: [PATCH 17/23] remove cdkout --- .gitignore | 1 + cdk.out/CdkStack.assets.json | 19 -- cdk.out/CdkStack.template.json | 314 --------------------------------- cdk.out/cdk.out | 1 - cdk.out/manifest.json | 167 ------------------ cdk.out/tree.json | 165 ----------------- 6 files changed, 1 insertion(+), 666 deletions(-) delete mode 100644 cdk.out/CdkStack.assets.json delete mode 100644 cdk.out/CdkStack.template.json delete mode 100644 cdk.out/cdk.out delete mode 100644 cdk.out/manifest.json delete mode 100644 cdk.out/tree.json diff --git a/.gitignore b/.gitignore index daedcabb..e3107333 100644 --- a/.gitignore +++ b/.gitignore @@ -371,4 +371,5 @@ docs/CHANGELOG.md # CDK **/cdk.out/ cdk.out +cdk.out/ cdk.out/** \ No newline at end of file diff --git a/cdk.out/CdkStack.assets.json b/cdk.out/CdkStack.assets.json deleted file mode 100644 index 0f012846..00000000 --- a/cdk.out/CdkStack.assets.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "version": "31.0.0", - "files": { - "1f66503cc1455d71cbdf4d10ceddc706d2016dc01b701a1e11d88204bc108317": { - "source": { - "path": "CdkStack.template.json", - "packaging": "file" - }, - "destinations": { - "current_account-current_region": { - "bucketName": "cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}", - "objectKey": "1f66503cc1455d71cbdf4d10ceddc706d2016dc01b701a1e11d88204bc108317.json", - "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-file-publishing-role-${AWS::AccountId}-${AWS::Region}" - } - } - } - }, - "dockerImages": {} -} \ No newline at end of file diff --git a/cdk.out/CdkStack.template.json b/cdk.out/CdkStack.template.json deleted file mode 100644 index 204a2811..00000000 --- a/cdk.out/CdkStack.template.json +++ /dev/null @@ -1,314 +0,0 @@ -{ - "Resources": { - "APIGATEWAYPRIVATEC8B60F10": { - "Type": "AWS::Config::ConfigRule", - "Properties": { - "Source": { - "CustomPolicyDetails": { - "EnableDebugLogDelivery": false, - "PolicyRuntime": "guard-2.x.x", - "PolicyText": "#\n# Select all AWS::ApiGateway::RestApi resources\n# present in the Resources section of the template. \n#\nlet api_gws = Resources.*[ Type == 'AWS::ApiGateway::RestApi']\n\n#\n# Rule intent: \n# 1) All AWS::ApiGateway::RestApi resources deployed must be private. \n# 2) All AWS::ApiGateway::RestApi resources deployed must have at least one AWS Identity and Access Management (IAM) policy condition key to allow access from a VPC.\n#\n# Expectations: \n# 1) SKIP when there are no AWS::ApiGateway::RestApi resources in the template. \n# 2) PASS when:\n# ALL AWS::ApiGateway::RestApi resources in the template have the EndpointConfiguration property set to Type: PRIVATE. \n# ALL AWS::ApiGateway::RestApi resources in the template have one IAM condition key specified in the Policy property with aws:sourceVpc or :SourceVpc. \n# 3) FAIL otherwise. 
\n#\n#\n\nrule check_rest_api_is_private when %api_gws !empty { \n %api_gws {\n Properties.EndpointConfiguration.Types[*] == \"PRIVATE\" \n } \n} \n\nrule check_rest_api_has_vpc_access when check_rest_api_is_private {\n %api_gws {\n Properties {\n #\n # ALL AWS::ApiGateway::RestApi resources in the template have one IAM condition key specified in the Policy property with \n # aws:sourceVpc or :SourceVpc\n # \n some Policy.Statement[*] {\n Condition.*[ keys == /aws:[sS]ource(Vpc|VPC|Vpce|VPCE)/ ] !empty\n }\n }\n }\n}" - }, - "Owner": "CUSTOM_POLICY", - "SourceDetails": [ - { - "EventSource": "aws.config", - "MessageType": "ConfigurationItemChangeNotification" - }, - { - "EventSource": "aws.config", - "MessageType": "OversizedConfigurationItemChangeNotification" - } - ] - }, - "ConfigRuleName": "API_GATEWAY_PRIVATE", - "Description": "API_GATEWAY_PRIVATE", - "InputParameters": {} - }, - "Metadata": { - "aws:cdk:path": "CdkStack/API_GATEWAY_PRIVATE/Resource" - } - }, - "APIGATEWAYPRIVATERemediationConfiguration": { - "Type": "AWS::Config::RemediationConfiguration", - "Properties": { - "ConfigRuleName": "API_GATEWAY_PRIVATE", - "TargetId": "AWS-PublishSNSNotification", - "TargetType": "SSM_DOCUMENT", - "Automatic": true, - "MaximumAutomaticAttempts": 2, - "Parameters": { - "AutomationAssumeRole": { - "StaticValue": { - "Values": [ - { - "Fn::Sub": "arn:aws:iam::${AWS::AccountId}:role/sns-access" - } - ] - } - }, - "Message": { - "StaticValue": { - "Values": [ - "hi" - ] - } - }, - "TopicArn": { - "StaticValue": { - "Values": [ - { - "Fn::Sub": "arn:aws:sns:${AWS::Region}:${AWS::AccountId}:rules-notification" - } - ] - } - } - }, - "ResourceType": "AWS::EC2::Instance", - "RetryAttemptSeconds": 2, - "TargetVersion": "1" - }, - "Metadata": { - "aws:cdk:path": "CdkStack/API_GATEWAY_PRIVATERemediationConfiguration" - } - }, - "CDKMetadata": { - "Type": "AWS::CDK::Metadata", - "Properties": { - "Analytics": "v2:deflate64:H4sIAAAAAAAA/y2LQQ7CIBBFz9J9Ga1d6J4LNHgAg0B1LDAJDDFN07tXS1c/7738C1xvcG70NwtjJ+HxCcudtZnan3oYiiO+YJElM4WBPJq5lWOUu1fFuz8pF5xFzUhHKGmHtcZMJRl33CzWMsz8pnjqoeugbz4ZUaQSGYMDVXcDBAyx15oAAAA=" - }, - "Metadata": { - "aws:cdk:path": "CdkStack/CDKMetadata/Default" - }, - "Condition": "CDKMetadataAvailable" - } - }, - "Conditions": { - "CDKMetadataAvailable": { - "Fn::Or": [ - { - "Fn::Or": [ - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "af-south-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "ap-east-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "ap-northeast-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "ap-northeast-2" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "ap-south-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "ap-southeast-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "ap-southeast-2" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "ca-central-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "cn-north-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "cn-northwest-1" - ] - } - ] - }, - { - "Fn::Or": [ - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "eu-central-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "eu-north-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "eu-south-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "eu-west-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "eu-west-2" - ] - }, - { - 
"Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "eu-west-3" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "me-south-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "sa-east-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "us-east-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "us-east-2" - ] - } - ] - }, - { - "Fn::Or": [ - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "us-west-1" - ] - }, - { - "Fn::Equals": [ - { - "Ref": "AWS::Region" - }, - "us-west-2" - ] - } - ] - } - ] - } - }, - "Parameters": { - "BootstrapVersion": { - "Type": "AWS::SSM::Parameter::Value", - "Default": "/cdk-bootstrap/hnb659fds/version", - "Description": "Version of the CDK Bootstrap resources in this environment, automatically retrieved from SSM Parameter Store. [cdk:skip]" - } - }, - "Rules": { - "CheckBootstrapVersion": { - "Assertions": [ - { - "Assert": { - "Fn::Not": [ - { - "Fn::Contains": [ - [ - "1", - "2", - "3", - "4", - "5" - ], - { - "Ref": "BootstrapVersion" - } - ] - } - ] - }, - "AssertDescription": "CDK bootstrap stack version 6 required. Please run 'cdk bootstrap' with a recent version of the CDK CLI." - } - ] - } - } -} \ No newline at end of file diff --git a/cdk.out/cdk.out b/cdk.out/cdk.out deleted file mode 100644 index 7925065e..00000000 --- a/cdk.out/cdk.out +++ /dev/null @@ -1 +0,0 @@ -{"version":"31.0.0"} \ No newline at end of file diff --git a/cdk.out/manifest.json b/cdk.out/manifest.json deleted file mode 100644 index f8a596d9..00000000 --- a/cdk.out/manifest.json +++ /dev/null @@ -1,167 +0,0 @@ -{ - "version": "31.0.0", - "artifacts": { - "CdkStack.assets": { - "type": "cdk:asset-manifest", - "properties": { - "file": "CdkStack.assets.json", - "requiresBootstrapStackVersion": 6, - "bootstrapStackVersionSsmParameter": "/cdk-bootstrap/hnb659fds/version" - } - }, - "CdkStack": { - "type": "aws:cloudformation:stack", - "environment": "aws://unknown-account/unknown-region", - "properties": { - "templateFile": "CdkStack.template.json", - "validateOnSynth": false, - "assumeRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-deploy-role-${AWS::AccountId}-${AWS::Region}", - "cloudFormationExecutionRoleArn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-cfn-exec-role-${AWS::AccountId}-${AWS::Region}", - "stackTemplateAssetObjectUrl": "s3://cdk-hnb659fds-assets-${AWS::AccountId}-${AWS::Region}/1f66503cc1455d71cbdf4d10ceddc706d2016dc01b701a1e11d88204bc108317.json", - "requiresBootstrapStackVersion": 6, - "bootstrapStackVersionSsmParameter": "/cdk-bootstrap/hnb659fds/version", - "additionalDependencies": [ - "CdkStack.assets" - ], - "lookupRole": { - "arn": "arn:${AWS::Partition}:iam::${AWS::AccountId}:role/cdk-hnb659fds-lookup-role-${AWS::AccountId}-${AWS::Region}", - "requiresBootstrapStackVersion": 8, - "bootstrapStackVersionSsmParameter": "/cdk-bootstrap/hnb659fds/version" - } - }, - "dependencies": [ - "CdkStack.assets" - ], - "metadata": { - "/CdkStack/API_GATEWAY_PRIVATE/Resource": [ - { - "type": "aws:cdk:logicalId", - "data": "APIGATEWAYPRIVATEC8B60F10", - "trace": [ - "new CustomPolicy (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\aws-config\\lib\\rule.js:1:7302)", - "Kernel._create (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9964:29)", - "Kernel.create (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9693:29)", - "KernelHost.processRequest 
(C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", - "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", - "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", - "process.processImmediate (node:internal/timers:471:21)" - ] - } - ], - "/CdkStack/API_GATEWAY_PRIVATERemediationConfiguration": [ - { - "type": "aws:cdk:logicalId", - "data": "APIGATEWAYPRIVATERemediationConfiguration", - "trace": [ - "Kernel._create (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9964:29)", - "Kernel.create (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9693:29)", - "KernelHost.processRequest (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", - "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", - "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", - "process.processImmediate (node:internal/timers:471:21)" - ] - } - ], - "/CdkStack/CDKMetadata/Default": [ - { - "type": "aws:cdk:logicalId", - "data": "CDKMetadata", - "trace": [ - "new MetadataResource (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\metadata-resource.js:1:707)", - "C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:2847", - "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:368)", - "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:330)", - "injectMetadataResources (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:2662)", - "synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:1:864)", - "App.synth (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stage.js:1:2052)", - "C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:105", - "Kernel._ensureSync (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:10358:28)", - "Kernel.invoke (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:34)", - "KernelHost.processRequest (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", - "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", - "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", - "process.processImmediate (node:internal/timers:471:21)" - ] - } - ], - "/CdkStack/CDKMetadata/Condition": [ - { - "type": "aws:cdk:logicalId", - "data": "CDKMetadataAvailable", - "trace": [ - "new MetadataResource (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\metadata-resource.js:1:966)", - "C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:2847", - "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:368)", - "visit 
(C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:330)", - "injectMetadataResources (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:2662)", - "synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:1:864)", - "App.synth (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stage.js:1:2052)", - "C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:105", - "Kernel._ensureSync (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:10358:28)", - "Kernel.invoke (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:34)", - "KernelHost.processRequest (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", - "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", - "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", - "process.processImmediate (node:internal/timers:471:21)" - ] - } - ], - "/CdkStack/BootstrapVersion": [ - { - "type": "aws:cdk:logicalId", - "data": "BootstrapVersion", - "trace": [ - "addBootstrapVersionRule (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\stack-synthesizer.js:1:5072)", - "DefaultStackSynthesizer.addBootstrapVersionRule (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\stack-synthesizer.js:1:2792)", - "DefaultStackSynthesizer.synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\default-synthesizer.js:1:5963)", - "C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:3375", - "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:368)", - "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:330)", - "synthesizeTree (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:3219)", - "synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:1:1083)", - "App.synth (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stage.js:1:2052)", - "C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:105", - "Kernel._ensureSync (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:10358:28)", - "Kernel.invoke (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:34)", - "KernelHost.processRequest (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", - "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", - "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", - "process.processImmediate (node:internal/timers:471:21)" - ] - } - ], - "/CdkStack/CheckBootstrapVersion": [ - { - "type": "aws:cdk:logicalId", 
- "data": "CheckBootstrapVersion", - "trace": [ - "addBootstrapVersionRule (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\stack-synthesizer.js:1:5425)", - "DefaultStackSynthesizer.addBootstrapVersionRule (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\stack-synthesizer.js:1:2792)", - "DefaultStackSynthesizer.synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stack-synthesizers\\default-synthesizer.js:1:5963)", - "C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:3375", - "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:368)", - "visit (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:4:330)", - "synthesizeTree (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:2:3219)", - "synthesize (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\private\\synthesis.js:1:1083)", - "App.synth (C:\\Users\\sirromb\\AppData\\Local\\Temp\\jsii-kernel-RpVQtO\\node_modules\\aws-cdk-lib\\core\\lib\\stage.js:1:2052)", - "C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:105", - "Kernel._ensureSync (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:10358:28)", - "Kernel.invoke (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:9769:34)", - "KernelHost.processRequest (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11544:36)", - "KernelHost.run (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11504:22)", - "Immediate._onImmediate (C:\\Users\\sirromb\\AppData\\Local\\Temp\\tmppk0m2get\\lib\\program.js:11505:46)", - "process.processImmediate (node:internal/timers:471:21)" - ] - } - ] - }, - "displayName": "CdkStack" - }, - "Tree": { - "type": "cdk:tree", - "properties": { - "file": "tree.json" - } - } - } -} \ No newline at end of file diff --git a/cdk.out/tree.json b/cdk.out/tree.json deleted file mode 100644 index 34a9965a..00000000 --- a/cdk.out/tree.json +++ /dev/null @@ -1,165 +0,0 @@ -{ - "version": "tree-0.1", - "tree": { - "id": "App", - "path": "", - "children": { - "CdkStack": { - "id": "CdkStack", - "path": "CdkStack", - "children": { - "API_GATEWAY_PRIVATE": { - "id": "API_GATEWAY_PRIVATE", - "path": "CdkStack/API_GATEWAY_PRIVATE", - "children": { - "Resource": { - "id": "Resource", - "path": "CdkStack/API_GATEWAY_PRIVATE/Resource", - "attributes": { - "aws:cdk:cloudformation:type": "AWS::Config::ConfigRule", - "aws:cdk:cloudformation:props": { - "source": { - "owner": "CUSTOM_POLICY", - "sourceDetails": [ - { - "eventSource": "aws.config", - "messageType": "ConfigurationItemChangeNotification" - }, - { - "eventSource": "aws.config", - "messageType": "OversizedConfigurationItemChangeNotification" - } - ], - "customPolicyDetails": { - "enableDebugLogDelivery": false, - "policyRuntime": "guard-2.x.x", - "policyText": "#\n# Select all AWS::ApiGateway::RestApi resources\n# present in the Resources section of the template. 
\n#\nlet api_gws = Resources.*[ Type == 'AWS::ApiGateway::RestApi']\n\n#\n# Rule intent: \n# 1) All AWS::ApiGateway::RestApi resources deployed must be private. \n# 2) All AWS::ApiGateway::RestApi resources deployed must have at least one AWS Identity and Access Management (IAM) policy condition key to allow access from a VPC.\n#\n# Expectations: \n# 1) SKIP when there are no AWS::ApiGateway::RestApi resources in the template. \n# 2) PASS when:\n# ALL AWS::ApiGateway::RestApi resources in the template have the EndpointConfiguration property set to Type: PRIVATE. \n# ALL AWS::ApiGateway::RestApi resources in the template have one IAM condition key specified in the Policy property with aws:sourceVpc or :SourceVpc. \n# 3) FAIL otherwise. \n#\n#\n\nrule check_rest_api_is_private when %api_gws !empty { \n %api_gws {\n Properties.EndpointConfiguration.Types[*] == \"PRIVATE\" \n } \n} \n\nrule check_rest_api_has_vpc_access when check_rest_api_is_private {\n %api_gws {\n Properties {\n #\n # ALL AWS::ApiGateway::RestApi resources in the template have one IAM condition key specified in the Policy property with \n # aws:sourceVpc or :SourceVpc\n # \n some Policy.Statement[*] {\n Condition.*[ keys == /aws:[sS]ource(Vpc|VPC|Vpce|VPCE)/ ] !empty\n }\n }\n }\n}" - } - }, - "configRuleName": "API_GATEWAY_PRIVATE", - "description": "API_GATEWAY_PRIVATE", - "inputParameters": {} - } - }, - "constructInfo": { - "fqn": "aws-cdk-lib.aws_config.CfnConfigRule", - "version": "2.78.0" - } - } - }, - "constructInfo": { - "fqn": "aws-cdk-lib.aws_config.CustomPolicy", - "version": "2.78.0" - } - }, - "API_GATEWAY_PRIVATERemediationConfiguration": { - "id": "API_GATEWAY_PRIVATERemediationConfiguration", - "path": "CdkStack/API_GATEWAY_PRIVATERemediationConfiguration", - "attributes": { - "aws:cdk:cloudformation:type": "AWS::Config::RemediationConfiguration", - "aws:cdk:cloudformation:props": { - "configRuleName": "API_GATEWAY_PRIVATE", - "targetId": "AWS-PublishSNSNotification", - "targetType": "SSM_DOCUMENT", - "automatic": true, - "maximumAutomaticAttempts": 2, - "parameters": { - "AutomationAssumeRole": { - "StaticValue": { - "Values": [ - { - "Fn::Sub": "arn:aws:iam::${AWS::AccountId}:role/sns-access" - } - ] - } - }, - "Message": { - "StaticValue": { - "Values": [ - "hi" - ] - } - }, - "TopicArn": { - "StaticValue": { - "Values": [ - { - "Fn::Sub": "arn:aws:sns:${AWS::Region}:${AWS::AccountId}:rules-notification" - } - ] - } - } - }, - "resourceType": "AWS::EC2::Instance", - "retryAttemptSeconds": 2, - "targetVersion": "1" - } - }, - "constructInfo": { - "fqn": "aws-cdk-lib.aws_config.CfnRemediationConfiguration", - "version": "2.78.0" - } - }, - "CDKMetadata": { - "id": "CDKMetadata", - "path": "CdkStack/CDKMetadata", - "children": { - "Default": { - "id": "Default", - "path": "CdkStack/CDKMetadata/Default", - "constructInfo": { - "fqn": "aws-cdk-lib.CfnResource", - "version": "2.78.0" - } - }, - "Condition": { - "id": "Condition", - "path": "CdkStack/CDKMetadata/Condition", - "constructInfo": { - "fqn": "aws-cdk-lib.CfnCondition", - "version": "2.78.0" - } - } - }, - "constructInfo": { - "fqn": "constructs.Construct", - "version": "10.2.17" - } - }, - "BootstrapVersion": { - "id": "BootstrapVersion", - "path": "CdkStack/BootstrapVersion", - "constructInfo": { - "fqn": "aws-cdk-lib.CfnParameter", - "version": "2.78.0" - } - }, - "CheckBootstrapVersion": { - "id": "CheckBootstrapVersion", - "path": "CdkStack/CheckBootstrapVersion", - "constructInfo": { - "fqn": "aws-cdk-lib.CfnRule", - "version": "2.78.0" - } - 
} - }, - "constructInfo": { - "fqn": "aws-cdk-lib.Stack", - "version": "2.78.0" - } - }, - "Tree": { - "id": "Tree", - "path": "Tree", - "constructInfo": { - "fqn": "constructs.Construct", - "version": "10.2.17" - } - } - }, - "constructInfo": { - "fqn": "aws-cdk-lib.App", - "version": "2.78.0" - } - } -} \ No newline at end of file From 69a04cba82eb737fdee39e04547f3d9f201b380e Mon Sep 17 00:00:00 2001 From: Benjamin Morris <93620006+bmorrissirromb@users.noreply.github.com> Date: Tue, 30 May 2023 08:19:40 -0700 Subject: [PATCH 18/23] add developer changes notes --- README.md | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/README.md b/README.md index 1a1bf525..a0e156f9 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,17 @@ Because these changes have the potential to be breaking changes, this will initi Add README.md from RDK v0 here. +pyproject toml should replace bandit, coverage -- use RDK 0.14.0+ as template + +remove python version/terraform + +Remove Pipfile configuration, move anything important into the poetry dev grouping + doc dependency group, test dependency group (eg. moto, mypy), dev dependency group (eg. pylint) + +Makefile can be replaced by poetry's poethepoet taskrunner + Makefiles are misused! + Look to eks-cluster-upgrade for example + # Developer Instructions From efc7ff961df048a1a5dd6b440096fb4fc633f8f5 Mon Sep 17 00:00:00 2001 From: Benjamin Morris Date: Wed, 21 Jun 2023 15:41:38 -0700 Subject: [PATCH 19/23] big update to cdkrdk --- .bandit.yaml | 18 - .coveragerc | 26 - .editorconfig | 31 - .gitattributes | 16 - .gitlint | 21 - .python-version | 1 - .terraform-version | 1 - Makefile | 560 ----- Pipfile | 57 - Pipfile.lock | 1977 ----------------- README.md | 82 +- docs/faq.md | 3 - docs/install.md | 74 - docs/usage/development.md | 0 docs/usage/getting-started.md | 3 - mkdocs.yml | 69 +- mypy.ini | 25 - pyproject.toml | 236 +- rdk/cli/commands/create.py | 2 +- rdk/cli/commands/deploy.py | 10 +- rdk/cli/commands/deploy_organization.py | 2 +- rdk/cli/commands/{destroy.py => undeploy.py} | 11 +- rdk/cli/main.py | 128 +- rdk/core/rules_deploy.py | 47 +- rdk/frameworks/cdk/app.py | 45 +- rdk/frameworks/cdk/cdk/cdk_stack.py | 153 +- .../cdk/cdk/core/rule_parameters.py | 1 + .../AMI_DEPRECATED_CHECK/parameters.json | 13 + .../API_GATEWAY_PRIVATE/parameters.json | 45 + .../API_GATEWAY_PRIVATE/rule_code.guard | 39 + .../ENCRYPTED_VOLUMES/parameters.json | 15 + .../parameters.json | 15 + .../parameters.json | 12 + .../rule_code.guard | 3 + rdk/runners/base.py | 123 +- rdk/runners/cdk.py | 165 +- setup.py | 43 - sonar-project.properties | 38 - tools/ci/bin/init-snapshot.sh | 83 - tools/ci/bin/install-tools-on-jenkins.sh | 49 - tools/docs/bin/generate-ref-cli.sh | 54 - tools/githooks/bin/commit-msg | 41 - tools/githooks/bin/pre-push | 77 - tools/githooks/etc/commit-template | 32 - tox.ini | 2 - twine.pypirc | 17 - 46 files changed, 744 insertions(+), 3721 deletions(-) delete mode 100644 .bandit.yaml delete mode 100644 .coveragerc delete mode 100644 .editorconfig delete mode 100644 .gitattributes delete mode 100644 .gitlint delete mode 100644 .python-version delete mode 100644 .terraform-version delete mode 100644 Makefile delete mode 100644 Pipfile delete mode 100644 Pipfile.lock delete mode 100644 docs/faq.md delete mode 100644 docs/install.md delete mode 100644 docs/usage/development.md delete mode 100644 docs/usage/getting-started.md delete mode 100644 mypy.ini rename rdk/cli/commands/{destroy.py => undeploy.py} (55%) create mode 100644 
rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/parameters.json create mode 100644 rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/parameters.json create mode 100644 rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/rule_code.guard create mode 100644 rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/ENCRYPTED_VOLUMES/parameters.json create mode 100644 rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED/parameters.json create mode 100644 rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/SNS_TOPIC_IN_US_EAST_1_ONLY/parameters.json create mode 100644 rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/SNS_TOPIC_IN_US_EAST_1_ONLY/rule_code.guard delete mode 100644 setup.py delete mode 100644 sonar-project.properties delete mode 100755 tools/ci/bin/init-snapshot.sh delete mode 100644 tools/ci/bin/install-tools-on-jenkins.sh delete mode 100644 tools/docs/bin/generate-ref-cli.sh delete mode 100755 tools/githooks/bin/commit-msg delete mode 100755 tools/githooks/bin/pre-push delete mode 100644 tools/githooks/etc/commit-template delete mode 100644 tox.ini delete mode 100644 twine.pypirc diff --git a/.bandit.yaml b/.bandit.yaml deleted file mode 100644 index 8d65e110..00000000 --- a/.bandit.yaml +++ /dev/null @@ -1,18 +0,0 @@ -############################################################################### -# BANDIT CONFIGURATIONS -############################################################################### - -# -# Reference: -# https://bandit.readthedocs.io/en/latest/config.html -# - -# Excluded path globs -# exclude_dirs: - -# Allow the use of assert in tests -assert_used: - skips: - - test_*.py - -############################################################################### diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index d6fdb661..00000000 --- a/.coveragerc +++ /dev/null @@ -1,26 +0,0 @@ -################################################################################ -# PYTHON COVERAGE CONFIGURATIONS -################################################################################ - -# -# Reference: -# https://coverage.readthedocs.io/en/latest/config.html -# - -[run] -branch = True - -[report] -fail_under = 90 - -[html] -directory = .reports/coverage-html -title = Coverage Report for rdk - -[xml] -output = .reports/coverage.xml - -[json] -output = .reports/coverage.json - -################################################################################ diff --git a/.editorconfig b/.editorconfig deleted file mode 100644 index 67778693..00000000 --- a/.editorconfig +++ /dev/null @@ -1,31 +0,0 @@ -############################################################################### -# .editorconfig -############################################################################### - -# Configure your IDE - -# Documentation: -# http://editorconfig.org/ - -# top-most EditorConfig file -root = true - -# Configurations for all files -[*] -charset = utf-8 -end_of_line = lf -indent_size = 2 -indent_style = space -insert_final_newline = true -trim_trailing_whitespace = true - -# Makefiles -[{Makefile,*.Makefile}] -indent_size = 4 -indent_style = tab - -# Python -[*.py] -indent_size = 4 - -############################################################################### diff --git a/.gitattributes b/.gitattributes deleted file mode 100644 index 76ca1b4d..00000000 --- a/.gitattributes +++ /dev/null @@ -1,16 +0,0 @@ 
-############################################################################### -# .gitattributes -############################################################################### - -# gitattributes - defining attributes per path - -# Documentation: -# https://git-scm.com/docs/gitattributes - -# Useful Templates -# https://github.com/alexkaratarakis/gitattributes - -# Everything in this repo is a text file -* text eol=lf - -############################################################################### diff --git a/.gitlint b/.gitlint deleted file mode 100644 index e0bb6b60..00000000 --- a/.gitlint +++ /dev/null @@ -1,21 +0,0 @@ -############################################################################### -# gitlint CONFIGURATIONS -############################################################################### - -# -# Reference: -# https://jorisroovers.com/gitlint/configuration/ -# - -[general] -verbosity = 3 -contrib = contrib-title-conventional-commits -ignore = body-is-missing - -[title-max-length] -line-length = 72 - -[body-max-line-length] -line-length = 80 - -############################################################################### diff --git a/.python-version b/.python-version deleted file mode 100644 index 54c5196a..00000000 --- a/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.10.9 diff --git a/.terraform-version b/.terraform-version deleted file mode 100644 index 90a27f9c..00000000 --- a/.terraform-version +++ /dev/null @@ -1 +0,0 @@ -1.0.5 diff --git a/Makefile b/Makefile deleted file mode 100644 index e8802661..00000000 --- a/Makefile +++ /dev/null @@ -1,560 +0,0 @@ -############################################################################### -# Makefile -############################################################################### - -# -# https://www.gnu.org/software/make/manual/html_node/index.html -# https://www.gnu.org/software/make/manual/html_node/Quick-Reference.html#Quick-Reference -# - -# ----------------------------------------------------------------------------- -# MAKE CONFIGURATIONS -# ----------------------------------------------------------------------------- - -# Default Shell is bash, with errors -SHELL := /usr/bin/env bash -.SHELLFLAGS := -eu -o pipefail -c - -# Do not run in parallel -.NOTPARALLEL: - -# ----------------------------------------------------------------------------- -# CHECK PRE-REQS -# ----------------------------------------------------------------------------- - -# This is intended to run as early as possible to ensure that various things -# that is Makefile depends on is available. - -override prereq_binaries := git python3 pipenv cfn-guard -$(foreach bin,$(prereq_binaries),\ - $(if $(shell command -v $(bin) 2>/dev/null),,\ - $(error '$(bin)' is not installed or available in PATH)\ - )\ -) - -# Make sure we have at least git v2 -ifneq ($(shell git --version | cut -d ' ' -f3 | cut -d. -f1),2) -$(error git is not compatible. 
Need at least git-2.0) -endif - -# ----------------------------------------------------------------------------- -# VARIABLES - PROJECT CONFIGURATIONS -# ----------------------------------------------------------------------------- - -# Root of the repository -override REPOROOT := $(shell git rev-parse --show-toplevel) - -# Directories -override SRC_DIR := $(REPOROOT)/rdk -override TESTS_DIR := $(REPOROOT)/tests -override TOOLS_DIR := $(REPOROOT)/tools -override REPORTS_DIR := $(REPOROOT)/.reports -override GITHOOKS_DIR := $(TOOLS_DIR)/githooks -override DOCS_DIR := $(REPOROOT)/docs -override TESTS_UNIT_DIR := $(TESTS_DIR)/unit - -# Setup.py configs -override PKG_SETUP := $(REPOROOT)/setup.py -override SETUP_PY_ARGS := --quiet --no-user-cfg - -# python versions -override PYENV_VERSION := $(shell head -1 $(REPOROOT)/.python-version) -export PYENV_VERSION - -# terraform version -override TFENV_TERRAFORM_VERSION := $(shell head -1 $(REPOROOT)/.terraform-version) -export TFENV_TERRAFORM_VERSION - -# pipenv configs -# https://pipenv.pypa.io/en/latest/advanced/#configuration-with-environment-variables -override PIPENV_VENV_IN_PROJECT := 1 -override PIPENV_DEFAULT_PYTHON_VERSION := $(PYENV_VERSION) -export PIPENV_VENV_IN_PROJECT -export PIPENV_DEFAULT_PYTHON_VERSION - -# Collections of file types in the repo -override py_files_in_repo := $(PKG_SETUP) $(SRC_DIR) $(TESTS_DIR) -override md_files_in_repo := $(REPOROOT)/README.md $(DOCS_DIR) - -# Path to init-snapshot helper -override INIT_SNAPSHOT := $(TOOLS_DIR)/ci/bin/init-snapshot.sh -ifneq ($(shell test -x $(INIT_SNAPSHOT); echo $$?),0) -$(shell chmod +x $(INIT_SNAPSHOT)) -endif - -# ------------------------------------------------------------------------------ -# TARGETS - PRIMARY -# ------------------------------------------------------------------------------ - -### * init | Initialize this repository for development -.PHONY: init -init: \ - _githooks-install \ - _python-pipenv-install \ - _helper-init-snapshot-save - -### * fmt | Format source code -.PHONY: fmt -fmt: \ - _helper-init-snapshot-check \ - _fmt-python-docstrings \ - _fmt-python-isort \ - _fmt-python-black \ - _fmt-markdown-mdformat - -### * lint | Lint source code -.PHONY: lint -lint: \ - _helper-init-snapshot-check \ - _test-reports-mkdir \ - _lint-python-docstrings \ - _lint-python-isort \ - _lint-python-black \ - _lint-python-pylint \ - _lint-python-bandit \ - _lint-python-mypy \ - _lint-python-setup \ - _lint-markdown-mdformat - -### * test | Run unit tests -.PHONY: test -test: \ - _helper-init-snapshot-check \ - _test-reports-mkdir \ - _test-python-pytest - -### * sonar | Run sonar analysis -.PHONY: sonar -sonar: \ - _helper-init-snapshot-check \ - _test-sonar-scan - -### * build | Build python package -.PHONY: build -build: \ - _helper-init-snapshot-check \ - _lint-python-setup \ - _build-wheel \ - _deploy-check-dist - -### * deploy | Publish python package -.PHONY: deploy -deploy: \ - _helper-init-snapshot-check \ - _deploy-check-dist \ - _deploy-upload-dist - -### * freeze | Update and lock dependencies -.PHONY: freeze -freeze: \ - _python-pipenv-lock \ - _python-pipenv-gen-requirements - -### * docs-build | Build documentation from sources -.PHONY: docs-build -docs-build: \ - _helper-init-snapshot-check \ - _docs-generate-ref-cli \ - _docs-generate-ref-api \ - _docs-build - -### * docs-server | Start a local server to host documentation -.PHONY: docs-server -docs-server: \ - docs-build \ - _docs-serve - -### * docs-deploy | Publish documentation to Github Pages -.PHONY: 
docs-deploy -docs-deploy: \ - docs-build \ - _docs-publish-gh-pages - -### * tf-init | Initialize terraform -.PHONY: tf-init -tf-init: \ - _helper-init-snapshot-check \ - _tf-create-plugin-cache \ - _tf-init - -### * tf-plan | Run terraform-plan -.PHONY: tf-plan -tf-plan: \ - _helper-init-snapshot-check \ - _tf-clean-planfile \ - _tf-plan - -### * tf-apply | Run terraform-apply -.PHONY: tf-apply -tf-apply: \ - _helper-init-snapshot-check \ - _tf-apply - -### * tf-destroy | Run terraform-destroy -.PHONY: tf-destroy -tf-destroy: \ - _helper-init-snapshot-check \ - _tf-clean-planfile \ - _tf-destroy \ - _tf-workspace-delete \ - _tf-clean-data-dir - -### * clean | Clean repository -.PHONY: clean -clean: \ - _githooks-clean \ - _clean-dist \ - _test-reports-rm \ - _python-pipenv-rm \ - _clean-all-tf-data-dir \ - _clean-empty-dirs \ - _clean-git - -# ----------------------------------------------------------------------------- -# TARGETS - PYTHON DEPENDENCY MANAGEMENT -# ----------------------------------------------------------------------------- - -.PHONY: _python-pipenv-install -_python-pipenv-install: - @pipenv sync --dev - @pipenv clean - @pipenv check || true - -.PHONY: _python-pipenv-lock -_python-pipenv-lock: - @rm -f Pipfile.lock - @pipenv lock --clear --dev - -.PHONY: _python-pipenv-gen-requirements -_python-pipenv-gen-requirements: - @rm -f > $(REPOROOT)/requirements.txt - @pipenv requirements > $(REPOROOT)/requirements.txt - -.PHONY: _python-pipenv-rm -_python-pipenv-rm: - @pipenv --rm || true - @pipenv --clear - -# ------------------------------------------------------------------------------ -# TARGETS - FORMATTING -# ------------------------------------------------------------------------------ - -.PHONY: _fmt-python-isort -_fmt-python-isort: - @pipenv run -- isort -- $(py_files_in_repo) - -.PHONY: _fmt-python-black -_fmt-python-black: - @pipenv run -- black -- $(py_files_in_repo) - -.PHONY: _fmt-python-docstrings -_fmt-python-docstrings: - @pipenv run -- docformatter \ - --in-place \ - --recursive \ - --blank \ - --pre-summary-newline \ - --make-summary-multi-line \ - -- $(py_files_in_repo) - -.PHONY: _fmt-markdown-mdformat -_fmt-markdown-mdformat: - @pipenv run -- mdformat \ - --number \ - --wrap no \ - -- $(md_files_in_repo) - -# ----------------------------------------------------------------------------- -# TARGETS - LINTING -# ----------------------------------------------------------------------------- - -.PHONY: _lint-python-isort -_lint-python-isort: - @pipenv run -- isort --check -- $(py_files_in_repo) - -.PHONY: _lint-python-black -_lint-python-black: - @pipenv run -- black --check -- $(py_files_in_repo) - -# pylint runs multiple times -# We need to do this to support multiple output formats -# run1: For the terminal (developer feedback) -# run2: For sonar compatible messages -# run3: For JSON formatted output, that then produces an HTML -.PHONY: _lint-python-pylint -_lint-python-pylint: - @pipenv run -- pylint \ - --reports=n \ - --output-format=colorized \ - -- $(SRC_DIR) - @pipenv run -- pylint \ - --exit-zero \ - --reports=n \ - --output-format=text \ - --msg-template='{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}' \ - -- $(SRC_DIR) \ - > $(REPORTS_DIR)/pylint-sonar.txt - @pipenv run -- pylint \ - --exit-zero \ - --reports=y \ - --output-format=jsonextended \ - -- $(SRC_DIR) \ - > $(REPORTS_DIR)/pylint.json - @pipenv run -- pylint-json2html \ - --input-format jsonextended \ - --output $(REPORTS_DIR)/pylint.html \ - $(REPORTS_DIR)/pylint.json - -# bandit runs 
multiple times -# We need to do this to support multiple output formats -# run1: For the terminal (developer feedback) -# run2: For sonar compatible messages -# run3: For HTML report -.PHONY: _lint-python-bandit -_lint-python-bandit: - @pipenv run -- bandit \ - --recursive \ - --quiet \ - --configfile $(REPOROOT)/.bandit.yaml \ - --format screen \ - -- $(SRC_DIR) - @pipenv run -- bandit \ - --recursive \ - --quiet \ - --exit-zero \ - --configfile $(REPOROOT)/.bandit.yaml \ - --format json \ - --output $(REPORTS_DIR)/bandit.json \ - -- $(SRC_DIR) - @pipenv run -- bandit \ - --recursive \ - --quiet \ - --exit-zero \ - --configfile $(REPOROOT)/.bandit.yaml \ - --format html \ - --output $(REPORTS_DIR)/bandit.html \ - -- $(SRC_DIR) - -.PHONY: _lint-python-mypy -_lint-python-mypy: - @pipenv run -- mypy \ - -- $(SRC_DIR) - -.PHONY: _lint-python-docstrings -_lint-python-docstrings: - @pipenv run -- docformatter \ - --check \ - --recursive \ - --blank \ - --pre-summary-newline \ - --make-summary-multi-line \ - -- $(py_files_in_repo) - -.PHONY: _lint-python-setup -_lint-python-setup: - @pipenv run -- \ - python -W ignore -- \ - $(PKG_SETUP) $(SETUP_PY_ARGS) check --strict - -.PHONY: _lint-markdown-mdformat -_lint-markdown-mdformat: - @pipenv run -- mdformat \ - --check \ - --number \ - --wrap no \ - -- $(md_files_in_repo) - -# ----------------------------------------------------------------------------- -# TARGETS - TESTING -# ----------------------------------------------------------------------------- - -.PHONY: _test-reports-mkdir -_test-reports-mkdir: - @mkdir -p $(REPORTS_DIR) - -.PHONY: _test-reports-rm -_test-reports-rm: - @rm -rf $(REPORTS_DIR) - -.PHONY: _test-python-pytest -_test-python-pytest: - @rm -rf $(REPOROOT)/.rdk - @pipenv run -- pytest $(TESTS_UNIT_DIR) - @rm -rf $(REPOROOT)/.rdk - -.PHONY: _test-sonar-scan -_test-sonar-scan: - @if ! command -v sonar-scanner >/dev/null 2>&1; then \ - echo "sonar-scanner is not installed" >&2; \ - exit 1; \ - fi - @if ! 
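The preflight pattern used by the sonar target (the check that begins here and completes just below) also works as a standalone guard script; a sketch that keeps the recipe's error messages, assuming bash:

    #!/usr/bin/env bash
    set -euo pipefail

    # refuse to continue if the scanner binary is not on PATH
    if ! command -v sonar-scanner >/dev/null 2>&1; then
        echo "sonar-scanner is not installed" >&2
        exit 1
    fi

    # refuse to continue if the authentication token is missing from the environment
    if [ -z "${SONAR_TOKEN:-}" ]; then
        echo "SONAR_TOKEN is not set" >&2
        exit 1
    fi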
test -n "$${SONAR_TOKEN}"; then \ - echo "SONAR_TOKEN is not set" >&2; \ - exit 1; \ - fi - @rdk_version=$$(grep 'VERSION.*=.*' \ - rdk/__init__.py \ - | head -1 | cut -d '=' -f2 \ - | tr -d ' ' | tr -d '"' \ - ) \ - && git_branch=$$(git rev-parse --abbrev-ref HEAD) \ - && export SONAR_SCANNER_OPTS="-Xmx512m" \ - && sonar-scanner \ - -Dsonar.login="$${SONAR_TOKEN}" \ - -Dsonar.projectVersion="$${rdk_version}" \ - -Dsonar.branch.name="$${git_branch}" - -# ----------------------------------------------------------------------------- -# TARGETS - BUILD -# ----------------------------------------------------------------------------- - -.PHONY: _build-wheel -_build-wheel: - @pipenv run -- \ - python -W ignore -- \ - $(PKG_SETUP) $(SETUP_PY_ARGS) bdist_wheel \ - --universal \ - --python-tag "py3" \ - --owner "nobody" \ - --group "nobody" - -# ----------------------------------------------------------------------------- -# TARGETS - DEPLOY -# ----------------------------------------------------------------------------- - -.PHONY: _deploy-check-dist -_deploy-check-dist: - @find ./dist -mindepth 1 -maxdepth 1 -type f -print0 \ - | xargs -0 -- pipenv run -- twine check --strict - -.PHONY: _deploy-upload-dist -_deploy-upload-dist: - @find ./dist -mindepth 1 -maxdepth 1 -type f -print0 \ - | xargs -0 -- pipenv run -- twine upload \ - --config-file $(REPOROOT)/twine.pypirc \ - --non-interactive \ - --repository artifactory \ - --verbose \ - -- - -# ------------------------------------------------------------------------------ -# TARGETS - GITHOOKS -# ------------------------------------------------------------------------------ - -.PHONY: _githooks-install -_githooks-install: - @chmod +x $(GITHOOKS_DIR)/bin/* - @git config --local core.hooksPath $(GITHOOKS_DIR)/bin - @git config --local commit.template $(GITHOOKS_DIR)/etc/commit-template - @git config --local fetch.prune true - @git config --local fetch.pruneTags true - @git config --local push.default simple - @git config --local pull.ff true - @git config --local pull.rebase false - @git config --local user.useConfigOnly true - -.PHONY: _githooks-clean -_githooks-clean: - @for c in \ - core.hooksPath \ - commit.template \ - ; do \ - if git config --local --get $$c >/dev/null 2>&1; then \ - git config --local --unset $$c; \ - fi; \ - done - -# ----------------------------------------------------------------------------- -# TARGETS - DOCS -# ----------------------------------------------------------------------------- - -.PHONY: _docs-generate-ref-cli -_docs-generate-ref-cli: - @chmod +x $(TOOLS_DIR)/docs/bin/generate-ref-cli.sh - @pipenv run -- $(TOOLS_DIR)/docs/bin/generate-ref-cli.sh - -.PHONY: _docs-generate-ref-api -_docs-generate-ref-api: - @rm -rf $(DOCS_DIR)/reference/api - @pipenv run -- pdoc -o $(DOCS_DIR)/reference/api rdk.pytest - -.PHONY: _docs-build -_docs-build: - @pipenv run -- mkdocs build - -.PHONY: _docs-serve -_docs-serve: - @pipenv run -- mkdocs serve - -.PHONY: _docs-publish-gh-pages -_docs-publish-gh-pages: - @pipenv run -- mkdocs gh-deploy --message "docs: publish from {sha}" - -# ----------------------------------------------------------------------------- -# TARGETS - CLEAN -# ----------------------------------------------------------------------------- - -.PHONY: _clean-dist -_clean-dist: - @rm -rf ./build ./dist ./*.egg-info - -.PHONY: _clean-git -_clean-git: - @git clean -fdXq - -.PHONY: _clean-empty-dirs -_clean-empty-dirs: - @find $(REPOROOT) -type d -empty -print0 \ - | xargs -0 -- rm -rf - -.PHONY: 
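Outside of make, the build-and-publish chain above reduces to three commands. A sketch under the following assumptions: it is run from the repository root, the pipenv environment from make init exists, dist/ is populated by the build step, and twine.pypirc defines the "artifactory" repository referenced by the recipe:

    # build a universal py3 wheel with neutral owner/group metadata
    pipenv run python -W ignore setup.py --quiet --no-user-cfg bdist_wheel \
        --universal --python-tag "py3" --owner "nobody" --group "nobody"

    # validate every artifact in dist/ before any upload is attempted
    find ./dist -mindepth 1 -maxdepth 1 -type f -print0 \
        | xargs -0 -- pipenv run twine check --strict

    # publish to the repository named "artifactory" in twine.pypirc
    find ./dist -mindepth 1 -maxdepth 1 -type f -print0 \
        | xargs -0 -- pipenv run twine upload \
            --config-file twine.pypirc --non-interactive --repository artifactory --verbose --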
_clean-all-tf-data-dir -_clean-all-tf-data-dir: - @find $(REPOROOT) -type d -name '.terraform' -print0 \ - | xargs -0 -- rm -rf - @find $(REPOROOT) -type d -name '*.terraform' -print0 \ - | xargs -0 -- rm -rf - @find $(REPOROOT) -type f -name 'tfplan' -print0 \ - | xargs -0 -- rm -f - @find $(REPOROOT) -type f -name '*tfplan*' -print0 \ - | xargs -0 -- rm -f - -# ----------------------------------------------------------------------------- -# TARGETS - HELPERS -# ----------------------------------------------------------------------------- - -.PHONY: _helper-init-snapshot-save -_helper-init-snapshot-save: - @$(INIT_SNAPSHOT) save - -.PHONY: _helper-init-snapshot-check -_helper-init-snapshot-check: - @$(INIT_SNAPSHOT) check ; rc=$$?; \ - if [[ "$$rc" -eq 0 ]]; then exit 0; fi; \ - if [[ "$$rc" -eq 1 ]]; then \ - echo "ERROR: Failed to check if repository initialization is required." >&2; \ - exit 1; \ - fi; \ - if [[ "$$rc" -eq 2 ]]; then \ - echo "WARNING: Repository initialization is required. Running now ..." >&2; \ - cd $(REPOROOT) || exit 1; \ - make init || exit 1; \ - fi - -# ----------------------------------------------------------------------------- -# TARGETS - HELP (DEFAULT) -# ----------------------------------------------------------------------------- - -### * help | Prints this message -.PHONY: help -.DEFAULT_GOAL := help -help: - @echo "USAGE: make [target ...]" - @echo - @echo "TARGETS:" - @echo - @grep -E '^###[[:blank:]]*\*[[:blank:]]*' $(lastword $(MAKEFILE_LIST)) \ - | sed -e 's|^###[[:blank:]]*\*[[:blank:]]*| |g' \ - | column -s'|' -t - @echo - -############################################################################### diff --git a/Pipfile b/Pipfile deleted file mode 100644 index a9196efa..00000000 --- a/Pipfile +++ /dev/null @@ -1,57 +0,0 @@ -[[source]] -name = "pypi" -url = "https://pypi.python.org/simple" -verify_ssl = true - -[dev-packages] -bandit = ">=1,<2" -docformatter = ">=1,<2" -gitlint = "<1" -isort= ">=5,<6" -lxml = ">=4,<5" -mdformat = "<1" -mdformat-beautysh = "<1" -mdformat-black = "<1" -mdformat-config = "<1" -mdformat-frontmatter = "<1" -mdformat-toc = "<1" -mkdocs = ">=1,<2" -mkdocs-material = ">=7,<8" -moto = { extras = ["cloudwatch", "sts", "s3"], version = ">=2,<3" } -mypy = "<1" -pdoc = ">=7,<8" -pylint = ">=2,<3" -pylint-json2html = "<1" -pylint-pytest = ">=1,<2" -pytest = ">=6,<7" -pytest-asyncio = "<1" -pytest-console-scripts = ">=1,<2" -pytest-cov = ">=2,<3" -pytest-env = "<1" -pytest-httpx = "<1" -pytest-mock = ">=3,<4" -pytest-reportlog = "<1" -twine = ">=3,<4" -types-aiofiles = "<1" - -# pinned to full version since we can't specify pre-releases for a single pkg -# see: https://github.com/pypa/pipenv/issues/1760 -black = "==21.7b0" - -# This package -rdk = {editable = true, path = "."} - -[packages] -# NOTE: If you update this, also update setup.py -aiofiles = "<1" -aws-cdk-lib = ">=2" -constructs = ">=10,<11" -# boto3 = ">=1,<2" -colorlog = ">=4,<5" -httpx = "<1" -mergedeep = ">=1,<2" -pytest = ">=6,<7" -semver = ">=2,<3" - -[requires] -python_version = "3.8" diff --git a/Pipfile.lock b/Pipfile.lock deleted file mode 100644 index 709fd44a..00000000 --- a/Pipfile.lock +++ /dev/null @@ -1,1977 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "7a6857503771f671fed5b5daf86254a4f9fefdbb5e7ecbe6f68fd144aff8e597" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "3.8" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.python.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "aiofiles": { - "hashes": [ - 
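The help target in the Makefile above leans on a small self-documentation convention: every line of the form "### * target | description" doubles as the entry printed by make help. The extraction can be run on its own as well; a sketch against this Makefile, with the path ./Makefile assumed in place of $(lastword $(MAKEFILE_LIST)):

    grep -E '^###[[:blank:]]*\*[[:blank:]]*' ./Makefile \
        | sed -e 's|^###[[:blank:]]*\*[[:blank:]]*| |g' \
        | column -s'|' -t

Because each annotated line is both a comment and the help text, the target list shown by make help cannot drift away from the targets actually defined.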
"sha256:7a973fc22b29e9962d0897805ace5856e6a566ab1f0c8e5c91ff6c866519c937", - "sha256:8334f23235248a3b2e83b2c3a78a22674f39969b96397126cc93664d9a901e59" - ], - "index": "pypi", - "version": "==0.8.0" - }, - "anyio": { - "hashes": [ - "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421", - "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3" - ], - "markers": "python_full_version >= '3.6.2'", - "version": "==3.6.2" - }, - "attrs": { - "hashes": [ - "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836", - "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99" - ], - "markers": "python_version >= '3.6'", - "version": "==22.2.0" - }, - "aws-cdk-lib": { - "hashes": [ - "sha256:0d7001b0f507dcd435c6c20688e61d6c45c297e54bae2bf36256e10520668a8a", - "sha256:a0aeaf0e0d0dcc36fe52a1df09708028a8f71f54116bc3f2afec546b0d90c256" - ], - "index": "pypi", - "version": "==2.72.1" - }, - "aws-cdk.asset-awscli-v1": { - "hashes": [ - "sha256:90adb0f2405e0794607dddab09c7427ca02941655fbfe0164459111d7509b1a3", - "sha256:fa6d42e0d026de4cbb610672acdbfa00db30e763613cfabb0c7e7b6cea63275d" - ], - "markers": "python_version ~= '3.7'", - "version": "==2.2.129" - }, - "aws-cdk.asset-kubectl-v20": { - "hashes": [ - "sha256:9834cdb150c5590aea4e5eba6de2a89b4c60617451181c524810c5a75154565c", - "sha256:a2fad1a5a35a94a465efe60859f91e45dacc33261fb9bbf1cf9bbc6e2f70e9d6" - ], - "markers": "python_version ~= '3.7'", - "version": "==2.1.1" - }, - "aws-cdk.asset-node-proxy-agent-v5": { - "hashes": [ - "sha256:31ef1c6e49ca1baaa4e32b1b4f1ba6c1f493939741387ec30fe581aebc2a18ae", - "sha256:d3c5402148ec2964b6faec33043760182c7434e3e000369363c265c01717cd0c" - ], - "markers": "python_version ~= '3.7'", - "version": "==2.0.105" - }, - "cattrs": { - "hashes": [ - "sha256:bc12b1f0d000b9f9bee83335887d532a1d3e99a833d1bf0882151c97d3e68c21", - "sha256:f0eed5642399423cf656e7b66ce92cdc5b963ecafd041d1b24d136fdde7acf6d" - ], - "markers": "python_version >= '3.7'", - "version": "==22.2.0" - }, - "certifi": { - "hashes": [ - "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3", - "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18" - ], - "markers": "python_version >= '3.6'", - "version": "==2022.12.7" - }, - "colorlog": { - "hashes": [ - "sha256:3dd15cb27e8119a24c1a7b5c93f9f3b455855e0f73993b1c25921b2f646f1dcd", - "sha256:59b53160c60902c405cdec28d38356e09d40686659048893e026ecbd589516b1" - ], - "index": "pypi", - "version": "==4.8.0" - }, - "constructs": { - "hashes": [ - "sha256:94e8f2dc238e30129013a808f23a109a4c5749b94616a8c51c2597ce49bd623c", - "sha256:e99390593511ceec1964beeab0977c6df4ca4dbdf0bf17dc3391471fb202e9fb" - ], - "index": "pypi", - "version": "==10.1.302" - }, - "exceptiongroup": { - "hashes": [ - "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e", - "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785" - ], - "markers": "python_version < '3.11'", - "version": "==1.1.1" - }, - "h11": { - "hashes": [ - "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", - "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761" - ], - "markers": "python_version >= '3.7'", - "version": "==0.14.0" - }, - "httpcore": { - "hashes": [ - "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb", - "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0" - ], - "markers": "python_version >= '3.7'", - "version": "==0.16.3" - }, - 
"httpx": { - "hashes": [ - "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9", - "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6" - ], - "index": "pypi", - "version": "==0.23.3" - }, - "idna": { - "hashes": [ - "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", - "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" - ], - "version": "==3.4" - }, - "importlib-resources": { - "hashes": [ - "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6", - "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a" - ], - "markers": "python_version >= '3.7'", - "version": "==5.12.0" - }, - "iniconfig": { - "hashes": [ - "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", - "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" - ], - "markers": "python_version >= '3.7'", - "version": "==2.0.0" - }, - "jsii": { - "hashes": [ - "sha256:4da63ab99f2696cd063574460c94221f0a7de9d345e71dfb19dfbcecf8ca8355", - "sha256:ea3cace063f6a47cdf0a74c929618d779efab426fedb7692a8ac1b9b29797f8c" - ], - "markers": "python_version ~= '3.7'", - "version": "==1.80.0" - }, - "mergedeep": { - "hashes": [ - "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8", - "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307" - ], - "index": "pypi", - "version": "==1.3.4" - }, - "packaging": { - "hashes": [ - "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2", - "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97" - ], - "markers": "python_version >= '3.7'", - "version": "==23.0" - }, - "pluggy": { - "hashes": [ - "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159", - "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3" - ], - "markers": "python_version >= '3.6'", - "version": "==1.0.0" - }, - "publication": { - "hashes": [ - "sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6", - "sha256:68416a0de76dddcdd2930d1c8ef853a743cc96c82416c4e4d3b5d901c6276dc4" - ], - "version": "==0.0.3" - }, - "py": { - "hashes": [ - "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", - "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==1.11.0" - }, - "pytest": { - "hashes": [ - "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89", - "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134" - ], - "index": "pypi", - "version": "==6.2.5" - }, - "python-dateutil": { - "hashes": [ - "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", - "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", - "version": "==2.8.2" - }, - "rfc3986": { - "extras": [ - "idna2008" - ], - "hashes": [ - "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835", - "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97" - ], - "version": "==1.5.0" - }, - "semver": { - "hashes": [ - "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4", - "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f" - ], - "index": "pypi", - "version": "==2.13.0" - }, - "six": { - "hashes": [ - 
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", - "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", - "version": "==1.16.0" - }, - "sniffio": { - "hashes": [ - "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101", - "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384" - ], - "markers": "python_version >= '3.7'", - "version": "==1.3.0" - }, - "toml": { - "hashes": [ - "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", - "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" - ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", - "version": "==0.10.2" - }, - "typeguard": { - "hashes": [ - "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4", - "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1" - ], - "markers": "python_full_version >= '3.5.3'", - "version": "==2.13.3" - }, - "typing-extensions": { - "hashes": [ - "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb", - "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4" - ], - "markers": "python_version >= '3.7'", - "version": "==4.5.0" - }, - "zipp": { - "hashes": [ - "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b", - "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556" - ], - "markers": "python_version < '3.10'", - "version": "==3.15.0" - } - }, - "develop": { - "aiofiles": { - "hashes": [ - "sha256:7a973fc22b29e9962d0897805ace5856e6a566ab1f0c8e5c91ff6c866519c937", - "sha256:8334f23235248a3b2e83b2c3a78a22674f39969b96397126cc93664d9a901e59" - ], - "index": "pypi", - "version": "==0.8.0" - }, - "anyio": { - "hashes": [ - "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421", - "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3" - ], - "markers": "python_full_version >= '3.6.2'", - "version": "==3.6.2" - }, - "appdirs": { - "hashes": [ - "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", - "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128" - ], - "version": "==1.4.4" - }, - "arrow": { - "hashes": [ - "sha256:3934b30ca1b9f292376d9db15b19446088d12ec58629bc3f0da28fd55fb633a1", - "sha256:5a49ab92e3b7b71d96cd6bfcc4df14efefc9dfa96ea19045815914a6ab6b1fe2" - ], - "markers": "python_version >= '3.6'", - "version": "==1.2.3" - }, - "astroid": { - "hashes": [ - "sha256:6e61b85c891ec53b07471aec5878f4ac6446a41e590ede0f2ce095f39f7d49dd", - "sha256:dea89d9f99f491c66ac9c04ebddf91e4acf8bd711722175fe6245c0725cc19bb" - ], - "markers": "python_full_version >= '3.7.2'", - "version": "==2.15.2" - }, - "astunparse": { - "hashes": [ - "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872", - "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8" - ], - "markers": "python_version < '3.9'", - "version": "==1.6.3" - }, - "attrs": { - "hashes": [ - "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836", - "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99" - ], - "markers": "python_version >= '3.6'", - "version": "==22.2.0" - }, - "aws-cdk-lib": { - "hashes": [ - "sha256:0d7001b0f507dcd435c6c20688e61d6c45c297e54bae2bf36256e10520668a8a", - "sha256:a0aeaf0e0d0dcc36fe52a1df09708028a8f71f54116bc3f2afec546b0d90c256" - ], - 
"index": "pypi", - "version": "==2.72.1" - }, - "aws-cdk.asset-awscli-v1": { - "hashes": [ - "sha256:90adb0f2405e0794607dddab09c7427ca02941655fbfe0164459111d7509b1a3", - "sha256:fa6d42e0d026de4cbb610672acdbfa00db30e763613cfabb0c7e7b6cea63275d" - ], - "markers": "python_version ~= '3.7'", - "version": "==2.2.129" - }, - "aws-cdk.asset-kubectl-v20": { - "hashes": [ - "sha256:9834cdb150c5590aea4e5eba6de2a89b4c60617451181c524810c5a75154565c", - "sha256:a2fad1a5a35a94a465efe60859f91e45dacc33261fb9bbf1cf9bbc6e2f70e9d6" - ], - "markers": "python_version ~= '3.7'", - "version": "==2.1.1" - }, - "aws-cdk.asset-node-proxy-agent-v5": { - "hashes": [ - "sha256:31ef1c6e49ca1baaa4e32b1b4f1ba6c1f493939741387ec30fe581aebc2a18ae", - "sha256:d3c5402148ec2964b6faec33043760182c7434e3e000369363c265c01717cd0c" - ], - "markers": "python_version ~= '3.7'", - "version": "==2.0.105" - }, - "bandit": { - "hashes": [ - "sha256:75665181dc1e0096369112541a056c59d1c5f66f9bb74a8d686c3c362b83f549", - "sha256:bdfc739baa03b880c2d15d0431b31c658ffc348e907fe197e54e0389dd59e11e" - ], - "index": "pypi", - "version": "==1.7.5" - }, - "beautysh": { - "hashes": [ - "sha256:423e0c87cccf2af21cae9a75e04e0a42bc6ce28469c001ee8730242e10a45acd", - "sha256:8c7d9c4f2bd02c089194218238b7ecc78879506326b301eba1d5f49471a55bac" - ], - "markers": "python_full_version >= '3.6.2' and python_full_version < '4.0.0'", - "version": "==6.2.1" - }, - "black": { - "hashes": [ - "sha256:1c7aa6ada8ee864db745b22790a32f94b2795c253a75d6d9b5e439ff10d23116", - "sha256:c8373c6491de9362e39271630b65b964607bc5c79c83783547d76c839b3aa219" - ], - "index": "pypi", - "version": "==21.7b0" - }, - "bleach": { - "hashes": [ - "sha256:1a1a85c1595e07d8db14c5f09f09e6433502c51c595970edc090551f0db99414", - "sha256:33c16e3353dbd13028ab4799a0f89a83f113405c766e9c122df8a06f5b85b3f4" - ], - "markers": "python_version >= '3.7'", - "version": "==6.0.0" - }, - "boto3": { - "hashes": [ - "sha256:816a198a6cc4f283af6b21439d85be6dbe4b73c2232dd906c6bafb4fece28d19", - "sha256:9de90a2c0b853f84436b032b28947fc8a765dc462573a8d543b13f16c6579b40" - ], - "markers": "python_version >= '3.7'", - "version": "==1.26.107" - }, - "botocore": { - "hashes": [ - "sha256:ee1e43e6cd0864cc6811ba3f05123647612ee3f07a286a4c94f5885aa86d6922", - "sha256:f63942b4b7248c0b3d6ecbc2852cf0787c23ace2a91a012f7ee0b3ae3eb08f4f" - ], - "markers": "python_version >= '3.7'", - "version": "==1.29.107" - }, - "cattrs": { - "hashes": [ - "sha256:bc12b1f0d000b9f9bee83335887d532a1d3e99a833d1bf0882151c97d3e68c21", - "sha256:f0eed5642399423cf656e7b66ce92cdc5b963ecafd041d1b24d136fdde7acf6d" - ], - "markers": "python_version >= '3.7'", - "version": "==22.2.0" - }, - "certifi": { - "hashes": [ - "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3", - "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18" - ], - "markers": "python_version >= '3.6'", - "version": "==2022.12.7" - }, - "cffi": { - "hashes": [ - "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5", - "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef", - "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104", - "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426", - "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405", - "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375", - "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a", - 
"sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e", - "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc", - "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf", - "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185", - "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497", - "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3", - "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35", - "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c", - "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83", - "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21", - "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca", - "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984", - "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac", - "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd", - "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee", - "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a", - "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2", - "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192", - "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7", - "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585", - "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f", - "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e", - "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27", - "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b", - "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e", - "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e", - "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d", - "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c", - "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415", - "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82", - "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02", - "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314", - "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325", - "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c", - "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3", - "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914", - "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045", - "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d", - "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9", - "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5", - "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2", - "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c", - "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3", - "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2", - "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8", - "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d", - 
"sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d", - "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9", - "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162", - "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76", - "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4", - "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e", - "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9", - "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6", - "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b", - "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01", - "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0" - ], - "version": "==1.15.1" - }, - "charset-normalizer": { - "hashes": [ - "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6", - "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1", - "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e", - "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373", - "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62", - "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230", - "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be", - "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c", - "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0", - "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448", - "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f", - "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649", - "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d", - "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0", - "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706", - "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a", - "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59", - "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23", - "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5", - "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb", - "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e", - "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e", - "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c", - "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28", - "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d", - "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41", - "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974", - "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce", - "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f", - "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1", - "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d", - "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8", - "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017", - "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31", - 
"sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7", - "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8", - "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e", - "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14", - "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd", - "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d", - "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795", - "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b", - "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b", - "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b", - "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203", - "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f", - "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19", - "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1", - "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a", - "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac", - "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9", - "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0", - "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137", - "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f", - "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6", - "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5", - "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909", - "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f", - "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0", - "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324", - "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755", - "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb", - "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854", - "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c", - "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60", - "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84", - "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0", - "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b", - "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1", - "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531", - "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1", - "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11", - "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326", - "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df", - "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab" - ], - "markers": "python_full_version >= '3.7.0'", - "version": "==3.1.0" - }, - "click": { - "hashes": [ - "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", - "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48" - ], - "markers": "python_version >= '3.7'", - "version": "==8.1.3" - }, - "colorama": { - "hashes": [ - 
"sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", - "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'", - "version": "==0.4.6" - }, - "colorlog": { - "hashes": [ - "sha256:3dd15cb27e8119a24c1a7b5c93f9f3b455855e0f73993b1c25921b2f646f1dcd", - "sha256:59b53160c60902c405cdec28d38356e09d40686659048893e026ecbd589516b1" - ], - "index": "pypi", - "version": "==4.8.0" - }, - "constructs": { - "hashes": [ - "sha256:94e8f2dc238e30129013a808f23a109a4c5749b94616a8c51c2597ce49bd623c", - "sha256:e99390593511ceec1964beeab0977c6df4ca4dbdf0bf17dc3391471fb202e9fb" - ], - "index": "pypi", - "version": "==10.1.302" - }, - "coverage": { - "hashes": [ - "sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d", - "sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4", - "sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e", - "sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab", - "sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90", - "sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6", - "sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731", - "sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540", - "sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2", - "sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292", - "sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5", - "sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b", - "sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2", - "sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0", - "sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57", - "sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3", - "sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140", - "sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84", - "sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988", - "sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67", - "sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d", - "sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2", - "sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5", - "sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9", - "sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8", - "sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd", - "sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6", - "sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be", - "sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88", - "sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25", - "sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137", - "sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968", - "sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9", - "sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef", - "sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54", - 
"sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512", - "sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005", - "sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f", - "sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149", - "sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d", - "sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8", - "sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7", - "sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5", - "sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016", - "sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69", - "sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212", - "sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc", - "sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8", - "sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d", - "sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd", - "sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169" - ], - "markers": "python_version >= '3.7'", - "version": "==7.2.2" - }, - "cryptography": { - "hashes": [ - "sha256:0a4e3406cfed6b1f6d6e87ed243363652b2586b2d917b0609ca4f97072994405", - "sha256:1e0af458515d5e4028aad75f3bb3fe7a31e46ad920648cd59b64d3da842e4356", - "sha256:2803f2f8b1e95f614419926c7e6f55d828afc614ca5ed61543877ae668cc3472", - "sha256:28d63d75bf7ae4045b10de5413fb1d6338616e79015999ad9cf6fc538f772d41", - "sha256:32057d3d0ab7d4453778367ca43e99ddb711770477c4f072a51b3ca69602780a", - "sha256:3a4805a4ca729d65570a1b7cac84eac1e431085d40387b7d3bbaa47e39890b88", - "sha256:63dac2d25c47f12a7b8aa60e528bfb3c51c5a6c5a9f7c86987909c6c79765554", - "sha256:650883cc064297ef3676b1db1b7b1df6081794c4ada96fa457253c4cc40f97db", - "sha256:6f2bbd72f717ce33100e6467572abaedc61f1acb87b8d546001328d7f466b778", - "sha256:7c872413353c70e0263a9368c4993710070e70ab3e5318d85510cc91cce77e7c", - "sha256:918cb89086c7d98b1b86b9fdb70c712e5a9325ba6f7d7cfb509e784e0cfc6917", - "sha256:9618a87212cb5200500e304e43691111570e1f10ec3f35569fdfcd17e28fd797", - "sha256:a805a7bce4a77d51696410005b3e85ae2839bad9aa38894afc0aa99d8e0c3160", - "sha256:cc3a621076d824d75ab1e1e530e66e7e8564e357dd723f2533225d40fe35c60c", - "sha256:cd033d74067d8928ef00a6b1327c8ea0452523967ca4463666eeba65ca350d4c", - "sha256:cf91e428c51ef692b82ce786583e214f58392399cf65c341bc7301d096fa3ba2", - "sha256:d36bbeb99704aabefdca5aee4eba04455d7a27ceabd16f3b3ba9bdcc31da86c4", - "sha256:d8aa3609d337ad85e4eb9bb0f8bcf6e4409bfb86e706efa9a027912169e89122", - "sha256:f5d7b79fa56bc29580faafc2ff736ce05ba31feaa9d4735048b0de7d9ceb2b94" - ], - "markers": "python_version >= '3.6'", - "version": "==40.0.1" - }, - "dill": { - "hashes": [ - "sha256:a07ffd2351b8c678dfc4a856a3005f8067aea51d6ba6c700796a4d9e280f39f0", - "sha256:e5db55f3687856d8fbdab002ed78544e1c4559a130302693d839dfe8f93f2373" - ], - "markers": "python_version < '3.11'", - "version": "==0.3.6" - }, - "docformatter": { - "hashes": [ - "sha256:2346dcc239b73bc4b62d1035e240d1338d154fb047a7e492f0168a93744222e2", - "sha256:dfad58437b560708eb74ccaccba5c91a0f98f534ed51b7af02aa35225e9eb6c2" - ], - "index": "pypi", - "version": "==1.6.0" - }, - "docutils": { - "hashes": [ - "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6", - "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc" - ], - 
"markers": "python_version >= '3.7'", - "version": "==0.19" - }, - "exceptiongroup": { - "hashes": [ - "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e", - "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785" - ], - "markers": "python_version < '3.11'", - "version": "==1.1.1" - }, - "ghp-import": { - "hashes": [ - "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619", - "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343" - ], - "version": "==2.1.0" - }, - "gitdb": { - "hashes": [ - "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a", - "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7" - ], - "markers": "python_version >= '3.7'", - "version": "==4.0.10" - }, - "gitlint": { - "hashes": [ - "sha256:26bb085959148d99fbbc178b4e56fda6c3edd7646b7c2a24d8ee1f8e036ed85d", - "sha256:b5b70fb894e80849b69abbb65ee7dbb3520fc3511f202a6e6b6ddf1a71ee8f61" - ], - "index": "pypi", - "version": "==0.19.1" - }, - "gitlint-core": { - "extras": [ - "trusted-deps" - ], - "hashes": [ - "sha256:7bf977b03ff581624a9e03f65ebb8502cc12dfaa3e92d23e8b2b54bbdaa29992", - "sha256:f41effd1dcbc06ffbfc56b6888cce72241796f517b46bd9fd4ab1b145056988c" - ], - "markers": "python_version >= '3.7'", - "version": "==0.19.1" - }, - "gitpython": { - "hashes": [ - "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573", - "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d" - ], - "markers": "python_version >= '3.7'", - "version": "==3.1.31" - }, - "h11": { - "hashes": [ - "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", - "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761" - ], - "markers": "python_version >= '3.7'", - "version": "==0.14.0" - }, - "httpcore": { - "hashes": [ - "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb", - "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0" - ], - "markers": "python_version >= '3.7'", - "version": "==0.16.3" - }, - "httpx": { - "hashes": [ - "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9", - "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6" - ], - "index": "pypi", - "version": "==0.23.3" - }, - "idna": { - "hashes": [ - "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", - "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" - ], - "version": "==3.4" - }, - "importlib-metadata": { - "hashes": [ - "sha256:43ce9281e097583d758c2c708c4376371261a02c34682491a8e98352365aad20", - "sha256:ff80f3b5394912eb1b108fcfd444dc78b7f1f3e16b16188054bd01cb9cb86f09" - ], - "markers": "python_version < '3.10'", - "version": "==6.1.0" - }, - "importlib-resources": { - "hashes": [ - "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6", - "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a" - ], - "markers": "python_version >= '3.7'", - "version": "==5.12.0" - }, - "iniconfig": { - "hashes": [ - "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", - "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" - ], - "markers": "python_version >= '3.7'", - "version": "==2.0.0" - }, - "isort": { - "hashes": [ - "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504", - "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6" - ], - "index": "pypi", - "version": 
"==5.12.0" - }, - "jaraco.classes": { - "hashes": [ - "sha256:2353de3288bc6b82120752201c6b1c1a14b058267fa424ed5ce5984e3b922158", - "sha256:89559fa5c1d3c34eff6f631ad80bb21f378dbcbb35dd161fd2c6b93f5be2f98a" - ], - "markers": "python_version >= '3.7'", - "version": "==3.2.3" - }, - "jinja2": { - "hashes": [ - "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852", - "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61" - ], - "markers": "python_version >= '3.7'", - "version": "==3.1.2" - }, - "jmespath": { - "hashes": [ - "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", - "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe" - ], - "markers": "python_version >= '3.7'", - "version": "==1.0.1" - }, - "jsii": { - "hashes": [ - "sha256:4da63ab99f2696cd063574460c94221f0a7de9d345e71dfb19dfbcecf8ca8355", - "sha256:ea3cace063f6a47cdf0a74c929618d779efab426fedb7692a8ac1b9b29797f8c" - ], - "markers": "python_version ~= '3.7'", - "version": "==1.80.0" - }, - "keyring": { - "hashes": [ - "sha256:771ed2a91909389ed6148631de678f82ddc73737d85a927f382a8a1b157898cd", - "sha256:ba2e15a9b35e21908d0aaf4e0a47acc52d6ae33444df0da2b49d41a46ef6d678" - ], - "markers": "python_version >= '3.7'", - "version": "==23.13.1" - }, - "lazy-object-proxy": { - "hashes": [ - "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382", - "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82", - "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9", - "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494", - "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46", - "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30", - "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63", - "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4", - "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae", - "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be", - "sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701", - "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd", - "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006", - "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a", - "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586", - "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8", - "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821", - "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07", - "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b", - "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171", - "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b", - "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2", - "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7", - "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4", - "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8", - "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e", - "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f", - "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda", - 
"sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4", - "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e", - "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671", - "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11", - "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455", - "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734", - "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb", - "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59" - ], - "markers": "python_version >= '3.7'", - "version": "==1.9.0" - }, - "lxml": { - "hashes": [ - "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7", - "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726", - "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03", - "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140", - "sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a", - "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05", - "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03", - "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419", - "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4", - "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e", - "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67", - "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50", - "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894", - "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf", - "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947", - "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1", - "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd", - "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3", - "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92", - "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3", - "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457", - "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74", - "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf", - "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1", - "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4", - "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975", - "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5", - "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe", - "sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7", - "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1", - "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2", - "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409", - "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f", - "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f", - "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5", - "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24", - 
"sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e", - "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4", - "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a", - "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c", - "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de", - "sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f", - "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b", - "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5", - "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7", - "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a", - "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c", - "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9", - "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e", - "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab", - "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941", - "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5", - "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45", - "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7", - "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892", - "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746", - "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c", - "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53", - "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe", - "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184", - "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38", - "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df", - "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9", - "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b", - "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2", - "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0", - "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda", - "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b", - "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5", - "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380", - "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33", - "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8", - "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1", - "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889", - "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9", - "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f", - "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c" - ], - "index": "pypi", - "version": "==4.9.2" - }, - "markdown": { - "hashes": [ - "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874", - "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621" - ], - "markers": "python_version >= '3.6'", - "version": "==3.3.7" - }, - "markdown-it-py": { - "hashes": [ - 
"sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30", - "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1" - ], - "markers": "python_version >= '3.7'", - "version": "==2.2.0" - }, - "markupsafe": { - "hashes": [ - "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed", - "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc", - "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2", - "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460", - "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7", - "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0", - "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1", - "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa", - "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03", - "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323", - "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65", - "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013", - "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036", - "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f", - "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4", - "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419", - "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2", - "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619", - "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a", - "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a", - "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd", - "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7", - "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666", - "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65", - "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859", - "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625", - "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff", - "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156", - "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd", - "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba", - "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f", - "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1", - "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094", - "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a", - "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513", - "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed", - "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d", - "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3", - "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147", - "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c", - "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603", - "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601", - 
"sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a", - "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1", - "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d", - "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3", - "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54", - "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2", - "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6", - "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58" - ], - "markers": "python_version >= '3.7'", - "version": "==2.1.2" - }, - "mccabe": { - "hashes": [ - "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", - "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e" - ], - "markers": "python_version >= '3.6'", - "version": "==0.7.0" - }, - "mdformat": { - "hashes": [ - "sha256:76398d03baa394f331fb560fd0aed8257cf77b65b1c8146b92d395af16253662", - "sha256:99b105033207d2ab70ba1ced8e07327ed4ef1e0a6bc1c7c00207ea73ab502782" - ], - "index": "pypi", - "version": "==0.7.16" - }, - "mdformat-beautysh": { - "hashes": [ - "sha256:23e52dc93ce4cdee12033766a6146f656c72095f74bc80a2bdfde1974a50ee72", - "sha256:b63a0e3adfc29238917b5d163483952f9326f7737cbe3137d3c6c512daf70789" - ], - "index": "pypi", - "version": "==0.1.1" - }, - "mdformat-black": { - "hashes": [ - "sha256:57cad92aee314b87dee52a795cdb52469ab166589d2771e3ad3dd19db907ab62", - "sha256:92a7f83779428ca04e939fcbd196ef6340bf9c83f33b2c50d4d3bb734a63d7b2" - ], - "index": "pypi", - "version": "==0.1.1" - }, - "mdformat-config": { - "hashes": [ - "sha256:0af65deef832886e9a47df126d1a083f278e47ca631fc5f5750adfd7b84239c4", - "sha256:a3f99eaf9970ee473be8cb459e323545781183327cae9bf070d2d3fb421dfd13" - ], - "index": "pypi", - "version": "==0.1.3" - }, - "mdformat-frontmatter": { - "hashes": [ - "sha256:15d3eed1543849d4fe72b1f75b8dffd8b49750c5149186591a1b9617178e2aa2", - "sha256:9c13f6b7a53de7b401af3c95e66735237545bd174e6619392153b296135ffd49" - ], - "index": "pypi", - "version": "==0.4.1" - }, - "mdformat-toc": { - "hashes": [ - "sha256:49d1f47d563f47405f3c165c6a4c30e8404a39f56ae254a27c2a90dd7eae1849", - "sha256:e8735f7517068f274b58b83407491b75445dc938473a3d5fa6467c0db0142daa" - ], - "index": "pypi", - "version": "==0.3.0" - }, - "mdit-py-plugins": { - "hashes": [ - "sha256:ca9a0714ea59a24b2b044a1831f48d817dd0c817e84339f20e7889f392d77c4e", - "sha256:eee0adc7195e5827e17e02d2a258a2ba159944a0748f59c5099a4a27f78fcf6a" - ], - "markers": "python_version >= '3.7'", - "version": "==0.3.5" - }, - "mdurl": { - "hashes": [ - "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", - "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba" - ], - "markers": "python_version >= '3.7'", - "version": "==0.1.2" - }, - "mergedeep": { - "hashes": [ - "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8", - "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307" - ], - "index": "pypi", - "version": "==1.3.4" - }, - "mkdocs": { - "hashes": [ - "sha256:8947af423a6d0facf41ea1195b8e1e8c85ad94ac95ae307fe11232e0424b11c5", - "sha256:c8856a832c1e56702577023cd64cc5f84948280c1c0fcc6af4cd39006ea6aa8c" - ], - "index": "pypi", - "version": "==1.4.2" - }, - "mkdocs-material": { - "hashes": [ - "sha256:1b1dbd8ef2508b358d93af55a5c5db3f141c95667fad802301ec621c40c7c217", - 
"sha256:1b6b3e9e09f922c2d7f1160fe15c8f43d4adc0d6fb81aa6ff0cbc7ef5b78ec75" - ], - "index": "pypi", - "version": "==7.3.6" - }, - "mkdocs-material-extensions": { - "hashes": [ - "sha256:9c003da71e2cc2493d910237448c672e00cefc800d3d6ae93d2fc69979e3bd93", - "sha256:e41d9f38e4798b6617ad98ca8f7f1157b1e4385ac1459ca1e4ea219b556df945" - ], - "markers": "python_version >= '3.7'", - "version": "==1.1.1" - }, - "more-itertools": { - "hashes": [ - "sha256:cabaa341ad0389ea83c17a94566a53ae4c9d07349861ecb14dc6d0345cf9ac5d", - "sha256:d2bc7f02446e86a68911e58ded76d6561eea00cddfb2a91e7019bbb586c799f3" - ], - "markers": "python_version >= '3.7'", - "version": "==9.1.0" - }, - "moto": { - "extras": [ - "cloudwatch", - "s3", - "sts" - ], - "hashes": [ - "sha256:0c29f5813d4db69b2f99c5538909a5aba0ba1cb91a74c19eddd9bfdc39ed2ff3", - "sha256:eaaed229742adbd1387383d113350ecd9222fc1e8f5611a9395a058c1eee4377" - ], - "index": "pypi", - "version": "==2.3.2" - }, - "mypy": { - "hashes": [ - "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d", - "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6", - "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf", - "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f", - "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813", - "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33", - "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad", - "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05", - "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297", - "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06", - "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd", - "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243", - "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305", - "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476", - "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711", - "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70", - "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5", - "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461", - "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab", - "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c", - "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d", - "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135", - "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93", - "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648", - "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a", - "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb", - "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3", - "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372", - "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb", - "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef" - ], - "index": "pypi", - "version": "==0.991" - }, - "mypy-extensions": { - "hashes": [ - "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", - "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782" - ], - "markers": "python_version 
>= '3.5'", - "version": "==1.0.0" - }, - "packaging": { - "hashes": [ - "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2", - "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97" - ], - "markers": "python_version >= '3.7'", - "version": "==23.0" - }, - "pathspec": { - "hashes": [ - "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687", - "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293" - ], - "markers": "python_version >= '3.7'", - "version": "==0.11.1" - }, - "pbr": { - "hashes": [ - "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b", - "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3" - ], - "markers": "python_version >= '2.6'", - "version": "==5.11.1" - }, - "pdoc": { - "hashes": [ - "sha256:681a2f243e4ca51bedd0645c2d18275b8b83444e9b6e42b502882ec45369e679" - ], - "index": "pypi", - "version": "==7.4.0" - }, - "pkginfo": { - "hashes": [ - "sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546", - "sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046" - ], - "markers": "python_version >= '3.6'", - "version": "==1.9.6" - }, - "platformdirs": { - "hashes": [ - "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08", - "sha256:ebe11c0d7a805086e99506aa331612429a72ca7cd52a1f0d277dc4adc20cb10e" - ], - "markers": "python_version >= '3.7'", - "version": "==3.2.0" - }, - "pluggy": { - "hashes": [ - "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159", - "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3" - ], - "markers": "python_version >= '3.6'", - "version": "==1.0.0" - }, - "publication": { - "hashes": [ - "sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6", - "sha256:68416a0de76dddcdd2930d1c8ef853a743cc96c82416c4e4d3b5d901c6276dc4" - ], - "version": "==0.0.3" - }, - "py": { - "hashes": [ - "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", - "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==1.11.0" - }, - "pycparser": { - "hashes": [ - "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9", - "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206" - ], - "version": "==2.21" - }, - "pygments": { - "hashes": [ - "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297", - "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717" - ], - "markers": "python_version >= '3.6'", - "version": "==2.14.0" - }, - "pylint": { - "hashes": [ - "sha256:001cc91366a7df2970941d7e6bbefcbf98694e00102c1f121c531a814ddc2ea8", - "sha256:1b647da5249e7c279118f657ca28b6aaebb299f86bf92affc632acf199f7adbb" - ], - "index": "pypi", - "version": "==2.17.2" - }, - "pylint-json2html": { - "hashes": [ - "sha256:65f37c5289ff05f998251487519f58c4fc9b52cd1fc09e8fc5ad75fff7aacfd6", - "sha256:79e681b6df76bb0b3d1a0c753cd3286d243de50905b4a9b63d0f17e5713dee7c" - ], - "index": "pypi", - "version": "==0.4.0" - }, - "pylint-pytest": { - "hashes": [ - "sha256:fb20ef318081cee3d5febc631a7b9c40fa356b05e4f769d6e60a337e58c8879b" - ], - "index": "pypi", - "version": "==1.1.2" - }, - "pymdown-extensions": { - "hashes": [ - "sha256:31eaa76ce6f96aabfcea98787c2fff2c5c0611b20a53a94213970cfbf05f02b8", - "sha256:562c38eee4ce3f101ce631b804bfc2177a8a76c7e4dc908871fb6741a90257a7" 
- ], - "markers": "python_version >= '3.7'", - "version": "==9.10" - }, - "pytest": { - "hashes": [ - "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89", - "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134" - ], - "index": "pypi", - "version": "==6.2.5" - }, - "pytest-asyncio": { - "hashes": [ - "sha256:83cbf01169ce3e8eb71c6c278ccb0574d1a7a3bb8eaaf5e50e0ad342afb33b36", - "sha256:f129998b209d04fcc65c96fc85c11e5316738358909a8399e93be553d7656442" - ], - "index": "pypi", - "version": "==0.20.3" - }, - "pytest-console-scripts": { - "hashes": [ - "sha256:21063b2e32df96da51412116e654babb1447a415929158d81d445667de9ea946", - "sha256:5c6c3daae9cf9fbed0e655072128938600193dc002a5cf1b187248644ba02857" - ], - "index": "pypi", - "version": "==1.3.1" - }, - "pytest-cov": { - "hashes": [ - "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a", - "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7" - ], - "index": "pypi", - "version": "==2.12.1" - }, - "pytest-env": { - "hashes": [ - "sha256:7e94956aef7f2764f3c147d216ce066bf6c42948bb9e293169b1b1c880a580c2" - ], - "index": "pypi", - "version": "==0.6.2" - }, - "pytest-httpx": { - "hashes": [ - "sha256:50b52b910f6f6cfb0aa65039d6f5bedb6ae3a0c02a98c4a7187543fe437c428a", - "sha256:edcb62baceffbd57753c1a7afc4656b0e71e91c7a512e143c0adbac762d979c1" - ], - "index": "pypi", - "version": "==0.21.3" - }, - "pytest-mock": { - "hashes": [ - "sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b", - "sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f" - ], - "index": "pypi", - "version": "==3.10.0" - }, - "pytest-reportlog": { - "hashes": [ - "sha256:65ac38cb5af90470df3dde6c03a6dd88090913d16765ee54d135279b5579c113", - "sha256:df59f7f1fcd9a0388e39b30e5aa264a609e64953e116f3ea6eb3aab22e3658e6" - ], - "index": "pypi", - "version": "==0.2.1" - }, - "python-dateutil": { - "hashes": [ - "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", - "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", - "version": "==2.8.2" - }, - "pytz": { - "hashes": [ - "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588", - "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb" - ], - "version": "==2023.3" - }, - "pyyaml": { - "hashes": [ - "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf", - "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293", - "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b", - "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57", - "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b", - "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4", - "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07", - "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba", - "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9", - "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287", - "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513", - "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0", - "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782", - "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0", - 
"sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92", - "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f", - "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2", - "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc", - "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1", - "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c", - "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86", - "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4", - "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c", - "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34", - "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b", - "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d", - "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c", - "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb", - "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7", - "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737", - "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3", - "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d", - "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358", - "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53", - "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78", - "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803", - "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a", - "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f", - "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174", - "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5" - ], - "markers": "python_version >= '3.6'", - "version": "==6.0" - }, - "pyyaml-env-tag": { - "hashes": [ - "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb", - "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069" - ], - "markers": "python_version >= '3.6'", - "version": "==0.1" - }, - "rdk": { - "editable": true, - "path": "." 
- }, - "readme-renderer": { - "hashes": [ - "sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273", - "sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343" - ], - "markers": "python_version >= '3.7'", - "version": "==37.3" - }, - "regex": { - "hashes": [ - "sha256:086afe222d58b88b62847bdbd92079b4699350b4acab892f88a935db5707c790", - "sha256:0b8eb1e3bca6b48dc721818a60ae83b8264d4089a4a41d62be6d05316ec38e15", - "sha256:11d00c31aeab9a6e0503bc77e73ed9f4527b3984279d997eb145d7c7be6268fd", - "sha256:11d1f2b7a0696dc0310de0efb51b1f4d813ad4401fe368e83c0c62f344429f98", - "sha256:1b1fc2632c01f42e06173d8dd9bb2e74ab9b0afa1d698058c867288d2c7a31f3", - "sha256:20abe0bdf03630fe92ccafc45a599bca8b3501f48d1de4f7d121153350a2f77d", - "sha256:22720024b90a6ba673a725dcc62e10fb1111b889305d7c6b887ac7466b74bedb", - "sha256:2472428efc4127374f494e570e36b30bb5e6b37d9a754f7667f7073e43b0abdd", - "sha256:25f0532fd0c53e96bad84664171969de9673b4131f2297f1db850d3918d58858", - "sha256:2848bf76673c83314068241c8d5b7fa9ad9bed866c979875a0e84039349e8fa7", - "sha256:37ae17d3be44c0b3f782c28ae9edd8b47c1f1776d4cabe87edc0b98e1f12b021", - "sha256:3cd9f5dd7b821f141d3a6ca0d5d9359b9221e4f051ca3139320adea9f1679691", - "sha256:4479f9e2abc03362df4045b1332d4a2b7885b245a30d4f4b051c4083b97d95d8", - "sha256:4c49552dc938e3588f63f8a78c86f3c9c75301e813bca0bef13bdb4b87ccf364", - "sha256:539dd010dc35af935b32f248099e38447bbffc10b59c2b542bceead2bed5c325", - "sha256:54c3fa855a3f7438149de3211738dd9b5f0c733f48b54ae05aa7fce83d48d858", - "sha256:55ae114da21b7a790b90255ea52d2aa3a0d121a646deb2d3c6a3194e722fc762", - "sha256:5ccfafd98473e007cebf7da10c1411035b7844f0f204015efd050601906dbb53", - "sha256:5fc33b27b1d800fc5b78d7f7d0f287e35079ecabe68e83d46930cf45690e1c8c", - "sha256:6560776ec19c83f3645bbc5db64a7a5816c9d8fb7ed7201c5bcd269323d88072", - "sha256:6572ff287176c0fb96568adb292674b421fa762153ed074d94b1d939ed92c253", - "sha256:6b190a339090e6af25f4a5fd9e77591f6d911cc7b96ecbb2114890b061be0ac1", - "sha256:7304863f3a652dab5e68e6fb1725d05ebab36ec0390676d1736e0571ebb713ef", - "sha256:75f288c60232a5339e0ff2fa05779a5e9c74e9fc085c81e931d4a264501e745b", - "sha256:7868b8f218bf69a2a15402fde08b08712213a1f4b85a156d90473a6fb6b12b09", - "sha256:787954f541ab95d8195d97b0b8cf1dc304424adb1e07365967e656b92b38a699", - "sha256:78ac8dd8e18800bb1f97aad0d73f68916592dddf233b99d2b5cabc562088503a", - "sha256:79e29fd62fa2f597a6754b247356bda14b866131a22444d67f907d6d341e10f3", - "sha256:845a5e2d84389c4ddada1a9b95c055320070f18bb76512608374aca00d22eca8", - "sha256:86b036f401895e854de9fefe061518e78d506d8a919cc250dc3416bca03f6f9a", - "sha256:87d9951f5a538dd1d016bdc0dcae59241d15fa94860964833a54d18197fcd134", - "sha256:8a9c63cde0eaa345795c0fdeb19dc62d22e378c50b0bc67bf4667cd5b482d98b", - "sha256:93f3f1aa608380fe294aa4cb82e2afda07a7598e828d0341e124b8fd9327c715", - "sha256:9bf4a5626f2a0ea006bf81e8963f498a57a47d58907eaa58f4b3e13be68759d8", - "sha256:9d764514d19b4edcc75fd8cb1423448ef393e8b6cbd94f38cab983ab1b75855d", - "sha256:a610e0adfcb0fc84ea25f6ea685e39e74cbcd9245a72a9a7aab85ff755a5ed27", - "sha256:a81c9ec59ca2303acd1ccd7b9ac409f1e478e40e96f8f79b943be476c5fdb8bb", - "sha256:b7006105b10b59971d3b248ad75acc3651c7e4cf54d81694df5a5130a3c3f7ea", - "sha256:c07ce8e9eee878a48ebeb32ee661b49504b85e164b05bebf25420705709fdd31", - "sha256:c125a02d22c555e68f7433bac8449992fa1cead525399f14e47c2d98f2f0e467", - "sha256:c37df2a060cb476d94c047b18572ee2b37c31f831df126c0da3cd9227b39253d", - "sha256:c869260aa62cee21c5eb171a466c0572b5e809213612ef8d495268cd2e34f20d", - 
"sha256:c88e8c226473b5549fe9616980ea7ca09289246cfbdf469241edf4741a620004", - "sha256:cd1671e9d5ac05ce6aa86874dd8dfa048824d1dbe73060851b310c6c1a201a96", - "sha256:cde09c4fdd070772aa2596d97e942eb775a478b32459e042e1be71b739d08b77", - "sha256:cf86b4328c204c3f315074a61bc1c06f8a75a8e102359f18ce99fbcbbf1951f0", - "sha256:d5bbe0e1511b844794a3be43d6c145001626ba9a6c1db8f84bdc724e91131d9d", - "sha256:d895b4c863059a4934d3e874b90998df774644a41b349ebb330f85f11b4ef2c0", - "sha256:db034255e72d2995cf581b14bb3fc9c00bdbe6822b49fcd4eef79e1d5f232618", - "sha256:dbb3f87e15d3dd76996d604af8678316ad2d7d20faa394e92d9394dfd621fd0c", - "sha256:dc80df325b43ffea5cdea2e3eaa97a44f3dd298262b1c7fe9dbb2a9522b956a7", - "sha256:dd7200b4c27b68cf9c9646da01647141c6db09f48cc5b51bc588deaf8e98a797", - "sha256:df45fac182ebc3c494460c644e853515cc24f5ad9da05f8ffb91da891bfee879", - "sha256:e152461e9a0aedec7d37fc66ec0fa635eca984777d3d3c3e36f53bf3d3ceb16e", - "sha256:e2396e0678167f2d0c197da942b0b3fb48fee2f0b5915a0feb84d11b6686afe6", - "sha256:e76b6fc0d8e9efa39100369a9b3379ce35e20f6c75365653cf58d282ad290f6f", - "sha256:ea3c0cb56eadbf4ab2277e7a095676370b3e46dbfc74d5c383bd87b0d6317910", - "sha256:ef3f528fe1cc3d139508fe1b22523745aa77b9d6cb5b0bf277f48788ee0b993f", - "sha256:fdf7ad455f1916b8ea5cdbc482d379f6daf93f3867b4232d14699867a5a13af7", - "sha256:fffe57312a358be6ec6baeb43d253c36e5790e436b7bf5b7a38df360363e88e9" - ], - "markers": "python_version >= '3.8'", - "version": "==2023.3.23" - }, - "requests": { - "hashes": [ - "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa", - "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf" - ], - "markers": "python_version >= '3.7' and python_version < '4'", - "version": "==2.28.2" - }, - "requests-toolbelt": { - "hashes": [ - "sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7", - "sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.10.1" - }, - "responses": { - "hashes": [ - "sha256:8a3a5915713483bf353b6f4079ba8b2a29029d1d1090a503c70b0dc5d9d0c7bd", - "sha256:c4d9aa9fc888188f0c673eff79a8dadbe2e75b7fe879dc80a221a06e0a68138f" - ], - "markers": "python_version >= '3.7'", - "version": "==0.23.1" - }, - "rfc3986": { - "extras": [ - "idna2008" - ], - "hashes": [ - "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835", - "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97" - ], - "version": "==1.5.0" - }, - "rich": { - "hashes": [ - "sha256:540c7d6d26a1178e8e8b37e9ba44573a3cd1464ff6348b99ee7061b95d1c6333", - "sha256:dc84400a9d842b3a9c5ff74addd8eb798d155f36c1c91303888e0a66850d2a15" - ], - "markers": "python_full_version >= '3.7.0'", - "version": "==13.3.3" - }, - "ruamel.yaml": { - "hashes": [ - "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7", - "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af" - ], - "markers": "python_version >= '3'", - "version": "==0.17.21" - }, - "ruamel.yaml.clib": { - "hashes": [ - "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e", - "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3", - "sha256:184faeaec61dbaa3cace407cffc5819f7b977e75360e8d5ca19461cd851a5fc5", - "sha256:1f08fd5a2bea9c4180db71678e850b995d2a5f4537be0e94557668cf0f5f9497", - "sha256:2aa261c29a5545adfef9296b7e33941f46aa5bbd21164228e833412af4c9c75f", - 
"sha256:3110a99e0f94a4a3470ff67fc20d3f96c25b13d24c6980ff841e82bafe827cac", - "sha256:3243f48ecd450eddadc2d11b5feb08aca941b5cd98c9b1db14b2fd128be8c697", - "sha256:370445fd795706fd291ab00c9df38a0caed0f17a6fb46b0f607668ecb16ce763", - "sha256:40d030e2329ce5286d6b231b8726959ebbe0404c92f0a578c0e2482182e38282", - "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94", - "sha256:4a4d8d417868d68b979076a9be6a38c676eca060785abaa6709c7b31593c35d1", - "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072", - "sha256:5bc0667c1eb8f83a3752b71b9c4ba55ef7c7058ae57022dd9b29065186a113d9", - "sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5", - "sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231", - "sha256:7bdb4c06b063f6fd55e472e201317a3bb6cdeeee5d5a38512ea5c01e1acbdd93", - "sha256:8831a2cedcd0f0927f788c5bdf6567d9dc9cc235646a434986a852af1cb54b4b", - "sha256:91a789b4aa0097b78c93e3dc4b40040ba55bef518f84a40d4442f713b4094acb", - "sha256:92460ce908546ab69770b2e576e4f99fbb4ce6ab4b245345a3869a0a0410488f", - "sha256:99e77daab5d13a48a4054803d052ff40780278240a902b880dd37a51ba01a307", - "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8", - "sha256:a7b301ff08055d73223058b5c46c55638917f04d21577c95e00e0c4d79201a6b", - "sha256:be2a7ad8fd8f7442b24323d24ba0b56c51219513cfa45b9ada3b87b76c374d4b", - "sha256:bf9a6bc4a0221538b1a7de3ed7bca4c93c02346853f44e1cd764be0023cd3640", - "sha256:c3ca1fbba4ae962521e5eb66d72998b51f0f4d0f608d3c0347a48e1af262efa7", - "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a", - "sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71", - "sha256:d5e51e2901ec2366b79f16c2299a03e74ba4531ddcfacc1416639c557aef0ad8", - "sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122", - "sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7", - "sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80", - "sha256:ecdf1a604009bd35c674b9225a8fa609e0282d9b896c03dd441a91e5f53b534e", - "sha256:efa08d63ef03d079dcae1dfe334f6c8847ba8b645d08df286358b1f5293d24ab", - "sha256:f01da5790e95815eb5a8a138508c01c758e5f5bc0ce4286c4f7028b8dd7ac3d0", - "sha256:f34019dced51047d6f70cb9383b2ae2853b7fc4dce65129a5acd49f4f9256646", - "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38" - ], - "markers": "platform_python_implementation == 'CPython' and python_version < '3.11'", - "version": "==0.2.7" - }, - "s3transfer": { - "hashes": [ - "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd", - "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947" - ], - "markers": "python_version >= '3.7'", - "version": "==0.6.0" - }, - "semver": { - "hashes": [ - "sha256:ced8b23dceb22134307c1b8abfa523da14198793d9787ac838e70e29e77458d4", - "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f" - ], - "index": "pypi", - "version": "==2.13.0" - }, - "sh": { - "hashes": [ - "sha256:e4045b6c732d9ce75d571c79f5ac2234edd9ae4f5fa9d59b09705082bdca18c7" - ], - "markers": "sys_platform != 'win32'", - "version": "==1.14.3" - }, - "six": { - "hashes": [ - "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", - "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", - "version": "==1.16.0" - }, - "smmap": { - "hashes": [ - 
"sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94", - "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936" - ], - "markers": "python_version >= '3.6'", - "version": "==5.0.0" - }, - "sniffio": { - "hashes": [ - "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101", - "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384" - ], - "markers": "python_version >= '3.7'", - "version": "==1.3.0" - }, - "stevedore": { - "hashes": [ - "sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021", - "sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771" - ], - "markers": "python_version >= '3.8'", - "version": "==5.0.0" - }, - "toml": { - "hashes": [ - "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", - "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" - ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", - "version": "==0.10.2" - }, - "tomli": { - "hashes": [ - "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f", - "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c" - ], - "markers": "python_version >= '3.6'", - "version": "==1.2.3" - }, - "tomlkit": { - "hashes": [ - "sha256:5325463a7da2ef0c6bbfefb62a3dc883aebe679984709aee32a317907d0a8d3c", - "sha256:f392ef70ad87a672f02519f99967d28a4d3047133e2d1df936511465fbb3791d" - ], - "markers": "python_version >= '3.7'", - "version": "==0.11.7" - }, - "tqdm": { - "hashes": [ - "sha256:1871fb68a86b8fb3b59ca4cdd3dcccbc7e6d613eeed31f4c332531977b89beb5", - "sha256:c4f53a17fe37e132815abceec022631be8ffe1b9381c2e6e30aa70edc99e9671" - ], - "markers": "python_version >= '3.7'", - "version": "==4.65.0" - }, - "twine": { - "hashes": [ - "sha256:8efa52658e0ae770686a13b675569328f1fba9837e5de1867bfe5f46a9aefe19", - "sha256:d0550fca9dc19f3d5e8eadfce0c227294df0a2a951251a4385797c8a6198b7c8" - ], - "index": "pypi", - "version": "==3.8.0" - }, - "typeguard": { - "hashes": [ - "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4", - "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1" - ], - "markers": "python_full_version >= '3.5.3'", - "version": "==2.13.3" - }, - "types-aiofiles": { - "hashes": [ - "sha256:1f93aa68e47de1379f45eef9acd34faa0f9341628921cd6aede666e6e559a5a8", - "sha256:be6715fffd1c7f84c9316000ba8bbc66a884246dbd2902c163ebc2d67315206b" - ], - "index": "pypi", - "version": "==0.8.11" - }, - "types-colorama": { - "hashes": [ - "sha256:a9421eb24d9cfc584880dc1d33b7fd406a14227c1f99f50c5ab9265e04d07638", - "sha256:d1e37571a19e152c930b3e789c316e9332e51a43bfcd4470b98225be974fb90c" - ], - "version": "==0.4.15.11" - }, - "types-pyyaml": { - "hashes": [ - "sha256:5aed5aa66bd2d2e158f75dda22b059570ede988559f030cf294871d3b647e3e8", - "sha256:c51b1bd6d99ddf0aa2884a7a328810ebf70a4262c292195d3f4f9a0005f9eeb6" - ], - "version": "==6.0.12.9" - }, - "types-setuptools": { - "hashes": [ - "sha256:8ee03d823fe7fda0bd35faeae33d35cb5c25b497263e6a58b34c4cfd05f40bcf", - "sha256:9660b8774b12cd61b448e2fd87a667c02e7ec13ce9f15171f1d49a4654c4df6a" - ], - "version": "==57.4.18" - }, - "typing-extensions": { - "hashes": [ - "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb", - "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4" - ], - "markers": "python_version >= '3.7'", - "version": "==4.5.0" - }, - "untokenize": { - "hashes": [ - 
"sha256:3865dbbbb8efb4bb5eaa72f1be7f3e0be00ea8b7f125c69cbd1f5fda926f37a2" - ], - "version": "==0.1.1" - }, - "urllib3": { - "hashes": [ - "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305", - "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.15" - }, - "watchdog": { - "hashes": [ - "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a", - "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100", - "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8", - "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc", - "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae", - "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41", - "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0", - "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f", - "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c", - "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9", - "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3", - "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709", - "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83", - "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759", - "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9", - "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3", - "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7", - "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f", - "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346", - "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674", - "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397", - "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96", - "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d", - "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a", - "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64", - "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44", - "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33" - ], - "markers": "python_version >= '3.7'", - "version": "==3.0.0" - }, - "webencodings": { - "hashes": [ - "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", - "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923" - ], - "version": "==0.5.1" - }, - "werkzeug": { - "hashes": [ - "sha256:2e1ccc9417d4da358b9de6f174e3ac094391ea1d4fbef2d667865d819dfd0afe", - "sha256:56433961bc1f12533306c624f3be5e744389ac61d722175d543e1751285da612" - ], - "markers": "python_version >= '3.7'", - "version": "==2.2.3" - }, - "wheel": { - "hashes": [ - "sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873", - "sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247" - ], - "markers": "python_version >= '3.7'", - "version": "==0.40.0" - }, - "wrapt": { - "hashes": [ - "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0", - "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420", - 
"sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a", - "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c", - "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079", - "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923", - "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f", - "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1", - "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8", - "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86", - "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0", - "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364", - "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e", - "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c", - "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e", - "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c", - "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727", - "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff", - "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e", - "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29", - "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7", - "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72", - "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475", - "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a", - "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317", - "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2", - "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd", - "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640", - "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98", - "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248", - "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e", - "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d", - "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec", - "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1", - "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e", - "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9", - "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92", - "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb", - "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094", - "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46", - "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29", - "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd", - "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705", - "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8", - "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975", - "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb", - "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e", - "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b", - 
"sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418", - "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019", - "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1", - "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba", - "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6", - "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2", - "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3", - "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7", - "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752", - "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416", - "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f", - "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1", - "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc", - "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145", - "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee", - "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a", - "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7", - "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b", - "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653", - "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0", - "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90", - "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29", - "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6", - "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034", - "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09", - "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559", - "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639" - ], - "markers": "python_version < '3.11'", - "version": "==1.15.0" - }, - "xmltodict": { - "hashes": [ - "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56", - "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852" - ], - "markers": "python_version >= '3.4'", - "version": "==0.13.0" - }, - "zipp": { - "hashes": [ - "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b", - "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556" - ], - "markers": "python_version < '3.10'", - "version": "==3.15.0" - } - } -} diff --git a/README.md b/README.md index 1a1bf525..9c2e669b 100644 --- a/README.md +++ b/README.md @@ -1,30 +1,57 @@ +## Introduction + +The AWS Config Rules Development Kit helps developers set up, author and test custom Config rules. It contains scripts to enable AWS Config, create a Config rule and test it with sample ConfigurationItems. + # Summary This branch of RDK is for the alpha-testing of RDK v1.0. RDK v1.0 will feature several changes to make RDK more useful and maintainable in the long term. The top changes are: + - Support for CfnGuard Rules - Changing back-end deployment methodology from CloudFormation to CDK - Refactoring the monolithic `rdk.py` file into individual files for each RDK command. Because these changes have the potential to be breaking changes, this will initially be released using a non-semantic version (eg. 
alpha-1.0.0) so that existing RDK pipelines are not impacted. -# TODO +# CDK Overview + +RDK v1.0 uses CDK to create the CloudFormation stacks that were previously created using raw CloudFormation in RDK v0.x. + +A call to `rdk deploy` will invoke several CDK commands in order: + +- `diff`: determine if changes are required, stopping if no changes are required +- `bootstrap`: configure the AWS environment for CDK use +- `deploy`: apply the CFT to your AWS account + +These commands will be run in the context of the standard RDK CDK App directory, defined in `rdk\frameworks\cdk`. The commands are run using Python's `subprocess` module. + +## Under-the-hood + +What actually happens when an `rdk deploy` command is issued? -Add README.md from RDK v0 here. +1. The `rdk` application runs, and recognizes the command as a `deploy`. +2. The `deploy` helper function creates a `RulesDeploy` object. + +3. The `RulesDeploy` object sets up a CDK runner to `diff/bootstrap/synth/deploy` the CDK template. # Developer Instructions These steps are used for developers who want to make and test changes to the RDK source code and compile an RDK executable. -You can also run `python -m rdk` from the root directory to run RDK from the script (will be slow to run). +You can run `python -m rdk` from the root directory to run RDK from the script (will be slow to run). Example: + +```bash +python -m rdk deploy --all --rules-dir .\tests\integration\rdk-cdk-rule-dir\ +``` -You can attach the CLI to the debugger using `python -m debugpy --listen 5678 rdk deploy` +You can attach the CLI to the debugger using `python -m debugpy --listen 5678 rdk deploy --all --rules-dir .\tests\integration\rdk-cdk-rule-dir\` CDK may attempt to run `python3`, which could cause issues on Windows systems where Python is often just named `python.exe`. Copying `python.exe` to `python3.exe` is a workaround for this issue. ## Windows venv instructions + - `virtualenv myenv` - `myenv\Scripts\activate` @@ -34,17 +61,50 @@ Note: if using a virtual environment on Windows, you may need to `pip install -r Install cfn-guard: https://docs.aws.amazon.com/cfn-guard/latest/ug/setting-up-linux.html -## Set up your local environment -`make freeze` -`make init` - # Activate pipenv -`pipenv shell` -# Navigate to rules dir in integration test -`cd tests/integration/rdk-cdk-int-rules-dir` +`pipenv shell` # Run RDK command for testing + `rdk test` `rdk deploy` `rdk destroy` + +## Prerequisites + +RDK requires `cdk` version 2 (or higher) to be installed and available in the `PATH`. + +RDK is developed in Python and requires Python v3.8 (or higher). + +## Installing RDK + +RDK is distributed as a Python Package (`rdk`). You can install it using `pip` or other common Python methods. + +### Using `pip` + +_CLI_: + +```bash +pip install 'rdk>=1,<2' +``` + +_requirements.txt_: + +```text +rdk>=1,<2 +``` + +# TODOs + +- Determine the right level of verbosity and make it easy to configure verbosity + +- Keep adding more features from RDK 0.x + +- Integrate README.md from RDK v0 here. + +- Determine whether all rules should be deployed to a single CFN Stack or whether each rule should get its own CFN Stack. The former is probably faster to deploy, but the latter matches RDK v0. + +- Validate that all the requirements, etc. are contained in the `pyproject.toml` file. The goal is to keep the project lightweight (not a lot of random configuration files) but keep a good level of functionality. + +- Verify that `rdklib` and `rdk` runtimes both function correctly. 
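+
+# Appendix: Invoking the CDK CLI from Python
+
+The CDK Overview above describes `rdk deploy` as shelling out to the `cdk` CLI (diff, bootstrap, deploy) from the CDK App directory using Python's `subprocess` module. The snippet below is a minimal, illustrative sketch of that sequence only. It is not the actual `CdkRunner` implementation; the directory constant, function names, and CLI flags used here are assumptions for illustration.
+
+```python
+import subprocess
+from pathlib import Path
+
+# Assumed location of the bundled CDK App (the overview above points at rdk/frameworks/cdk).
+CDK_APP_DIR = Path("rdk") / "frameworks" / "cdk"
+
+
+def run_cdk(*args: str) -> subprocess.CompletedProcess:
+    """Run a cdk sub-command in the context of the RDK CDK App directory."""
+    return subprocess.run(["cdk", *args], cwd=CDK_APP_DIR, check=False)
+
+
+def deploy_all_stacks() -> None:
+    # 1. diff: `cdk diff --fail` exits non-zero when differences exist,
+    #    so a zero exit code means there is nothing to deploy.
+    if run_cdk("diff", "--fail").returncode == 0:
+        print("No changes detected; skipping deploy.")
+        return
+
+    # 2. bootstrap: make sure the target account/region has the CDK toolkit resources.
+    run_cdk("bootstrap")
+
+    # 3. deploy: apply the synthesized CloudFormation template(s) to the account.
+    run_cdk("deploy", "--all", "--require-approval", "never")
+
+
+if __name__ == "__main__":
+    deploy_all_stacks()
+```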
diff --git a/docs/faq.md b/docs/faq.md deleted file mode 100644 index b81fe619..00000000 --- a/docs/faq.md +++ /dev/null @@ -1,3 +0,0 @@ -## What is RDK? - -The AWS Config Rules Development Kit helps developers set up, author and test custom Config rules. It contains scripts to enable AWS Config, create a Config rule and test it with sample ConfigurationItems. diff --git a/docs/install.md b/docs/install.md deleted file mode 100644 index eb2a5b21..00000000 --- a/docs/install.md +++ /dev/null @@ -1,74 +0,0 @@ -## Prerequisites - -RDK requires `cdk` version 2 (or higher) to be installed and available in the `PATH`. - -RDK is developed in Python and requires Python v3.8 (or higher). - -## Installing RDK - -RDK is distributed as a Python Package (`rdk`) - -### Using `pip` - -_CLI_: - -```bash -pip install 'rdk>=1,<2' -``` - -_requirements.txt_: - -```text -rdk>=1,<2 -``` - -### Using `pipenv` - -_CLI_: - -```bash -pipenv install 'rdk>=1,<2' -``` - -_Pipfile_: - -```toml -[[source]] -name = "pypi" -verify_ssl = true - -[packages] -rdk = ">=1,<2" -``` - -### Using `poetry` - -_CLI_: - -```bash -poetry add 'rdk>=1,<2' -``` - -_pyproject.toml_: - -```toml -[tool.poetry] -[[tool.poetry.source]] -name = "pypi" -default = true - -[tool.poetry.dependencies] -rdk = ">=1,<2" -``` - -### Using `pipx` - -```bash -pipx install 'rdk>=1,<2' -``` - -### Using `conda` - -```bash -conda install 'rdk>=1,<2' -``` diff --git a/docs/usage/development.md b/docs/usage/development.md deleted file mode 100644 index e69de29b..00000000 diff --git a/docs/usage/getting-started.md b/docs/usage/getting-started.md deleted file mode 100644 index 1dfc80f7..00000000 --- a/docs/usage/getting-started.md +++ /dev/null @@ -1,3 +0,0 @@ -## Introduction - -The AWS Config Rules Development Kit helps developers set up, author and test custom Config rules. It contains scripts to enable AWS Config, create a Config rule and test it with sample ConfigurationItems. \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml index 8f41dce0..89a345e5 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,62 +1,17 @@ -################################################################################ -# MKDOCS CONFIGURATIONS -################################################################################ - -# -# Reference: -# https://www.mkdocs.org/user-guide/configuration/ -# - -# ------------------------------------------------------------------------------ -# SITE CONFIGURATIONS -# ------------------------------------------------------------------------------ - -site_name: RDK User Guide -site_url: - -repo_url: https://github.com/awslabs/aws-config-rdk -repo_name: Github - +site_name: AWS RDK Documentation theme: name: material palette: - - media: '(prefers-color-scheme: light)' scheme: default - primary: blue - accent: red - toggle: - icon: material/toggle-switch-off-outline - name: Switch to dark mode - - media: '(prefers-color-scheme: dark)' - scheme: slate - primary: blue - accent: red - toggle: - icon: material/toggle-switch - name: Switch to light mode - features: - - navigation.sections - - navigation.top - + primary: orange +plugins: + - search + # TODO: Enable this if/when docstrings are expanded in the core rdk module. 
+ # - mkdocstrings: + # handlers: + # python: + # paths: [rdk] markdown_extensions: -- toc: - permalink: true - -# ------------------------------------------------------------------------------ -# NAVIGATION -# ------------------------------------------------------------------------------ - -nav: -- Home: index.md -- Install: install.md -- FAQ: faq.md -- Usage: - - Getting Started: usage/getting-started.md - - AWS Configurations: usage/aws-configurations.md - - Developing Test Cases: usage/development.md -- Reference: - - Command Line: reference/cli.md - - Test Case: reference/test-case.md - - API: reference/api/ - -################################################################################ + - markdown_include.include: + base_path: . +docs_dir: docs diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index 6710a685..00000000 --- a/mypy.ini +++ /dev/null @@ -1,25 +0,0 @@ -############################################################################### -# MYPY CONFIGURATIONS -############################################################################### - -# -# Reference: -# https://mypy.readthedocs.io/en/stable/config_file.html -# - -[mypy] -# Behavior -ignore_missing_imports = True - -# Outputs -pretty = True -show_error_codes = True - -# Reports -html_report = .reports/mypy-html -junit_xml = .reports/mypy.xml - -# Excludes -exclude = mugc.py - -############################################################################### diff --git a/pyproject.toml b/pyproject.toml index e5a40d1c..390e0c26 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,124 +1,122 @@ -############################################################################### -# PYTHON PROJECT CONFIGS -############################################################################### +# Copyright 2023 Amazon.com, Inc. or its affiliates. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at +# +# http://aws.amazon.com/apache2.0/ +# +# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
+[tool.poetry] +name = "rdk" +version = "1.0.0" +description = "Rule Development Kit CLI for AWS Config" +authors = [ + "AWS RDK Maintainers ", +] +repository = "https://github.com/awslabs/aws-config-rdk" +homepage = "https://github.com/awslabs/aws-config-rdk" +readme = "README.md" +packages = [{include = "rdk"}] +keywords = ["amazon", "aws", "awslabs", "rdk", "config", "rules", "compliance"] +documentation = "https://aws-config-rdk.readthedocs.io" +classifiers = [ + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", +] +include = [ + "README.md", + "NOTICE.txt", + "LICENSE", + "rdk/template/*", + "rdk/template/terraform/*", + "rdk/template/terraform/0.11/*", + "rdk/template/terraform/0.12/*", + "rdk/template/example_ci/*", + "rdk/template/runtime/*", + "rdk/template/runtime/python3.7/*", + "rdk/template/runtime/python3.7-lib/*", + "rdk/template/runtime/python3.8/*", + "rdk/template/runtime/python3.8-lib/*", + "rdk/template/runtime/python3.9/*", + "rdk/template/runtime/python3.9-lib/*", + "rdk/template/runtime/python3.10/*", + "rdk/template/runtime/python3.10-lib/*", +] +license = "Apache-2.0" -# ----------------------------------------------------------------------------- -# BLACK -# ----------------------------------------------------------------------------- -[tool.black] -target-version = ["py38"] +[tool.poetry.scripts] +rdk = "rdk.cli:main" -# ----------------------------------------------------------------------------- -# ISORT -# ----------------------------------------------------------------------------- +[tool.bandit] +exclude_dirs = ["tests"] + +# Styling and linting Configurations [tool.isort] profile = "black" -virtual_env = ".venv" - -# ----------------------------------------------------------------------------- -# PYLINT -# ----------------------------------------------------------------------------- -[tool.pylint.MASTER] - -# Load Plugins -# pylint_pytest: Suppress pytest related false-positives -# pylint_json2html: Support jsonextended output-format -load-plugins = "pylint_pytest,pylint_json2html" - -# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. -jobs = 0 - -# Pickle collected data for later comparisons. -persistent = "no" - -# Min score threshold -fail-under = "9.0" - -# Files or directories to be skipped. They should be base names, not paths. -ignore = "mugc.py" - -[tool.pylint.BASIC] - -# Naming conventions -# Allowed: lower-cased single words, or snake_case words -attr-rgx = "^(?:[a-z0-9_]+)$" -argument-rgx = "^(?:[a-z0-9_]+)$" -variable-rgx = "^(?:[a-z0-9_]+)$" -inlinevar-rgx = "^(?:[a-z0-9_]+)$" - -# Display hints on above naming conventions -include-naming-hint = "yes" +line_length = 120 -[tool.pylint."MESSAGES CONTROL"] - -# Why? 
-# * broad-except: We are trying to catch all exceptions -# * duplicate-code: Caused by similar import statements -# * line-too-long: black takes care of this -# * logging-fstring-interpolation: why would we NOT use f-strings ever -# * missing-module-docstring: we document each function/class/method - -disable = """ -broad-except, -duplicate-code, -line-too-long, -logging-fstring-interpolation, -missing-module-docstring, -""" - -# ----------------------------------------------------------------------------- -# PYTEST -# ----------------------------------------------------------------------------- -[tool.pytest.ini_options] - -# classes called `Test` -# https://docs.pytest.org/en/latest/example/pythoncollection.html#changing-naming-conventions -python_files = "test_*.py" -python_classes = "PyTest" -python_functions = "test_*" - -# CLI Options -addopts = """\ - --exitfirst \ - --strict-config \ - -vv \ - -p no:warnings \ - --junit-xml=.reports/junit.xml \ - --report-log=.reports/pytest-log.json \ - --cov=rdk \ - --cov-config=.coveragerc \ - --no-cov-on-fail \ - --cov-report=term \ - --cov-report=html \ - --cov-report=xml - """ - -# Enable `pytest` CLI logging -log_cli = true -log_cli_level = "INFO" - -# Set package log level -log_level = "DEBUG" -log_date_format = "%Y-%m-%dT%H:%M:%S%z" -log_format = "%(asctime)s | %(levelname)-8s | %(message)s" - -# Junit suite-name -junit_suite_name = "rdk" - -# pytest-console-scripts options -script_launch_mode = "subprocess" - -# pytest-env -env = [ - # Fake creds for moto - "AWS_ACCESS_KEY_ID=testing", - "AWS_SECRET_ACCESS_KEY=testing", - "AWS_SESSION_TOKEN=testing", - "AWS_SECURITY_TOKEN=testing", - "AWS_DEFAULT_REGION=us-east-1", - "AWS_REGION=us-east-1", - "MOTO_ACCOUNT_ID=123456789012", -] - -############################################################################### +[tool.black] +line-length = 120 +target-version = ["py310"] + +[tool.ruff] +line-length = 120 +target-version = "py310" + +[tool.poe.tasks] +isort = "isort --profile=black ." +black = "black ." +check-black = {cmd = "black . --check --diff", help = "Check code for black styling"} +check-isort = {cmd = "isort --check --profile=black .", help = "Check code for import styling"} +check-docstrings = "pydocstyle -e ." +check-ruff = "ruff check rdk" +check = ["check-isort", "check-black"] +lint = ["check-docstrings", "check-ruff"] +fix = ["isort", "black"] +# test = "pytest --cov=rdk --cov-report=xml --cov-report=term" +ruff = "ruff check --fix rdk" +safety = "safety check" +bandit = "bandit -r rdk" +security = ["safety", "bandit"] +update-doc-deps = {cmd = "poetry export --only=docs -f requirements.txt > docs/requirements.txt", help = "Generate an updated requirements.txt for docs" } +serve-docs = {cmd = "mkdocs serve"} +# requires poethepoet outside of poetry. 
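+# Example invocations (a documentation sketch, assuming poethepoet is installed separately,
+# e.g. `pipx install poethepoet`):
+#   poe check      # runs check-isort + check-black
+#   poe lint       # runs check-docstrings + check-ruff
+#   poe fix        # runs isort + black to auto-format
+#   poe security   # runs safety + bandit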
+install = "poetry install" +build = "poetry build" + +[tool.poetry.dependencies] +python = "^3.7.2" +boto3 = "^1.26.139" +pyyaml = "^6.0" + +[tool.poetry.group.dev.dependencies] +rdklib = "^0.3.0" +black = "^22.12.0" +pydocstyle = "^6.3.0" +isort = {extras = ["toml"], version = "^5.11.4"} +mypy = "^1.3.0" +debugpy = "^1.6.7" +ruff = "^0.0.269" + +[tool.poetry.group.security.dependencies] +bandit = "^1.7.5" +safety = "^2.3.5" + +[tool.poetry.group.types.dependencies] +types-pyyaml = "^6.0.12.10" +boto3-stubs = {extras = ["cloudformation", "config", "iam", "s3", "sts"], version = "^1.26.139"} + + +[tool.poetry.group.docs.dependencies] +mkdocs = "^1.4.3" +mkdocs-material = "^9.1.14" +mkdocstrings-python = "^1.0.0" +markdown-include = "^0.8.1" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" \ No newline at end of file diff --git a/rdk/cli/commands/create.py b/rdk/cli/commands/create.py index 566f63a8..23145dba 100644 --- a/rdk/cli/commands/create.py +++ b/rdk/cli/commands/create.py @@ -11,4 +11,4 @@ def run(): logger = get_main_logger() logger.info("AWS Config create is starting ...") - sys.exit(print("RDK create")) + sys.exit(print("NOT IMPLEMENTED YET - RDK would create a new rule folder")) diff --git a/rdk/cli/commands/deploy.py b/rdk/cli/commands/deploy.py index 4ab2b72a..914e223a 100644 --- a/rdk/cli/commands/deploy.py +++ b/rdk/cli/commands/deploy.py @@ -1,15 +1,19 @@ import sys +from pathlib import Path + from typing import Any, Callable, Dict, List, Optional from rdk.core.rules_deploy import RulesDeploy from rdk.utils.logger import get_main_logger -def run(rulenames: List[str], dryrun: bool): +def run(rulenames: List[str], dryrun: bool, rules_dir: str): """ - test sub-command handler. + Deploy sub-command handler. """ logger = get_main_logger() logger.info("RDK is starting ...") - sys.exit(RulesDeploy(rulenames=rulenames, dryrun=dryrun).run()) + sys.exit( + RulesDeploy(rulenames=rulenames, dryrun=dryrun, rules_dir=Path(rules_dir)).run() + ) diff --git a/rdk/cli/commands/deploy_organization.py b/rdk/cli/commands/deploy_organization.py index b726cedc..e5e5b88d 100644 --- a/rdk/cli/commands/deploy_organization.py +++ b/rdk/cli/commands/deploy_organization.py @@ -11,4 +11,4 @@ def run(): logger = get_main_logger() logger.info("AWS Config deploy organization is starting ...") - sys.exit(print("RDK deploying to organization")) + sys.exit(print("NOT IMPLEMENTED YET - RDK would deploy to Organization")) diff --git a/rdk/cli/commands/destroy.py b/rdk/cli/commands/undeploy.py similarity index 55% rename from rdk/cli/commands/destroy.py rename to rdk/cli/commands/undeploy.py index ae14b6e4..3fc41747 100644 --- a/rdk/cli/commands/destroy.py +++ b/rdk/cli/commands/undeploy.py @@ -1,16 +1,19 @@ import sys from typing import Any, Callable, Dict, List, Optional - +from pathlib import Path from rdk.core.rules_deploy import RulesDeploy from rdk.utils.logger import get_main_logger -# TODO - should this be named undeploy for consistency with RDK v0? -def run(rulenames: List[str], dryrun: bool): +def run(rulenames: List[str], dryrun: bool, rules_dir: str): """ test sub-command handler. 
""" logger = get_main_logger() logger.info("Destroying RDK rules ...") - sys.exit(RulesDeploy(rulenames=rulenames, dryrun=dryrun).destroy()) + sys.exit( + RulesDeploy( + rulenames=rulenames, dryrun=dryrun, rules_dir=Path(rules_dir) + ).destroy() + ) diff --git a/rdk/cli/main.py b/rdk/cli/main.py index 470a1d46..b0ad532b 100644 --- a/rdk/cli/main.py +++ b/rdk/cli/main.py @@ -7,7 +7,7 @@ import rdk.cli.commands.deploy as deploy_cmd import rdk.cli.commands.init as init_cmd import rdk.cli.commands.test as test_cmd -import rdk.cli.commands.destroy as destroy_cmd +import rdk.cli.commands.undeploy as destroy_cmd import rdk.cli.commands.sample_ci as sample_ci_cmd import rdk.utils.logger as rdk_logger from rdk.core.get_accepted_resource_types import get_accepted_resource_types @@ -57,47 +57,67 @@ def main(): help=f"Use {this_pkg.NAME} --help for detailed usage", ) - # init + # Reusable arguments + rulename_arg = { + "dest": "rulename", + "metavar": "", + "nargs": "*", + "default": "", + "help": "Rule name(s) to perform this command on.", + } + dryrun_name_or_flags = [ + "-n", + "--dryrun", + ] + dryrun_arg = { + "dest": "dryrun", + "action": "store_true", + "default": False, + "help": "Dry run mode", + } + + all_arg = { + "dest": "all", + "action": "store_true", + "default": False, + "help": "If specified, runs the RDK command for all rules in the directory.", + } + + rule_dir_arg = { + "dest": "rules_dir", + "default": os.getcwd(), + "help": "This arg is mainly used for testing -- it allows you to specify a different rule directory than the CWD as the holder of RDK rule folders", + } + + # COMMAND-SPECIFIC PARSERS + + # INIT commands_parser.add_parser( "init", help="Sets up AWS Config. This will enable configuration recording in AWS and ensure necessary S3 buckets and IAM Roles are created.", ) - # deploy + # DEPLOY commands_parser_deploy = commands_parser.add_parser( "deploy", help="deploy AWS Config Rules", ) - commands_parser_deploy.add_argument( - "rulename", - metavar="", - nargs="*", - default="", - help="Rule name(s) to deploy. Rule(s) will be pushed to AWS.", - ) + # Can either specify rule names or --all + rule_args_parser_deploy = commands_parser_deploy.add_mutually_exclusive_group() + rule_args_parser_deploy.add_argument(**rulename_arg) + rule_args_parser_deploy.add_argument("--all", **all_arg) - commands_parser_deploy.add_argument( - "-n", - "--dryrun", - action="store_true", - default=False, - help="Dry run mode", - ) + commands_parser_deploy.add_argument("--rules-dir", **rule_dir_arg) + commands_parser_deploy.add_argument(*dryrun_name_or_flags, **dryrun_arg) - # test + # TEST commands_parser_test = commands_parser.add_parser( "test", help="deploy AWS Config Rules", ) - commands_parser_test.add_argument( - "rulename", - metavar="", - nargs="*", - default="", - help="Rule name(s) to test. Unit test of the rule(s) will be executed.", - ) + commands_parser_test.add_argument(**rulename_arg) commands_parser_test.add_argument( "-v", @@ -107,34 +127,20 @@ def main(): help="Verbose mode", ) - # destroy + # UNDEPLOY commands_parser_destroy = commands_parser.add_parser( - "destroy", + "undeploy", help="destroy AWS Config Rules", ) - commands_parser_destroy.add_argument( - "rulename", - metavar="", - nargs="*", - default="", - help="Rule name(s) to destroy. 
Rule(s) will be removed.", - ) - - commands_parser_destroy.add_argument( - "-n", - "--dryrun", - action="store_true", - default=False, - help="Dry run mode", - ) + rule_args_parser_destroy = commands_parser_destroy.add_mutually_exclusive_group() + rule_args_parser_destroy.add_argument(**rulename_arg) + rule_args_parser_destroy.add_argument("--all", **all_arg) - # _pytest -- hidden command used by pytests - commands_parser.add_parser( - "_pytest", - ) + commands_parser_destroy.add_argument("--rules-dir", **rule_dir_arg) + commands_parser_destroy.add_argument(*dryrun_name_or_flags, **dryrun_arg) - # sample-ci + # SAMPLE-CI commands_parser_sample_ci = commands_parser.add_parser( "sample-ci", help="Provides a way to see sample configuration items for most supported resource types.", @@ -147,6 +153,11 @@ def main(): choices=get_accepted_resource_types(), ) + # _pytest -- hidden command used by pytests + commands_parser.add_parser( + "_pytest", + ) + # Parse all args and commands args = main_parser.parse_args() @@ -169,9 +180,17 @@ def main(): # handle: deploy if args.command == "deploy": + # Any subdirectory of rules_dir with a parameters.json file in it is assumed to be a Rule + if args.all: + rulenames = [ + f.name + for f in os.scandir(args.rules_dir) + if f.is_dir() and os.path.exists(os.path.join(f, "parameters.json")) + ] + else: + rulenames = args.rulename deploy_cmd.run( - rulenames=args.rulename, - dryrun=args.dryrun, + rulenames=rulenames, dryrun=args.dryrun, rules_dir=args.rules_dir ) # handle: test @@ -181,11 +200,14 @@ def main(): verbose=args.verbose, ) - # handle: destroy - if args.command == "destroy": + # handle: undeploy + if args.command == "undeploy": + if args.all: + rulenames = [f.name for f in os.scandir(args.rules_dir) if f.is_dir()] + else: + rulenames = args.rulename destroy_cmd.run( - rulenames=args.rulename, - dryrun=args.dryrun, + rulenames=args.rulename, dryrun=args.dryrun, rules_dir=args.rules_dir ) # handle: sample-ci diff --git a/rdk/core/rules_deploy.py b/rdk/core/rules_deploy.py index b2f748eb..4e166942 100644 --- a/rdk/core/rules_deploy.py +++ b/rdk/core/rules_deploy.py @@ -1,4 +1,5 @@ import logging +import sys import time from dataclasses import dataclass, field from pathlib import Path @@ -21,36 +22,48 @@ class RulesDeploy: rulenames: List[str] dryrun: bool + rules_dir: Path logger: logging.Logger = field(init=False) def __post_init__(self): self.logger = rdk_logger.get_main_logger() + def runner_setup(self): + """ + Validate rule arguments and create a CDK Runner + """ + if not self.rules_dir: + self.logger.error( + "Invalid option, must specify a rule name, rule set, or explicitly use '--all'." + ) + sys.exit(0) + # This logic ensures that the rule name will be used for stack names/rule names instead of the full path + rule_names_no_path = [] + for rulepath in self.rulenames: + rule_names_no_path.append(Path(rulepath).name) + self.cdk_runner = CdkRunner( + rules_dir=self.rules_dir, + rulenames=rule_names_no_path, + ) + def run(self): """ Runs Rules Deployment. 
""" - if len(self.rulenames) > 0: - rules_dir = Path(self.rulenames[0]) - else: - rules_dir = Path().absolute() + self.runner_setup() - cdk_runner = CdkRunner(rules_dir=rules_dir) - - cdk_runner.diff() - cdk_runner.synthesize() - cdk_runner.bootstrap() - cdk_runner.deploy() + stacks_to_deploy = self.cdk_runner.diff() + if not stacks_to_deploy: + self.logger.info("No changes to deploy.") + return + # self.cdk_runner.synthesize() # cdk diff will perform a synth behind the scenes, making this synth unnecessary + self.cdk_runner.bootstrap() # TODO - parameter to skip bootstrap? Could speed things up a bit + self.cdk_runner.deploy(stacks_to_deploy) def destroy(self): """ Destroy Rules Deployment. """ - if len(self.rulenames) > 0: - rules_dir = Path(self.rulenames[0]) - else: - rules_dir = Path().absolute() - - cdk_runner = CdkRunner(rules_dir=rules_dir) - cdk_runner.destroy() + self.runner_setup() + self.cdk_runner.destroy() diff --git a/rdk/frameworks/cdk/app.py b/rdk/frameworks/cdk/app.py index 26e23e65..75e77a2e 100644 --- a/rdk/frameworks/cdk/app.py +++ b/rdk/frameworks/cdk/app.py @@ -1,23 +1,40 @@ #!/usr/bin/env python import os +from pathlib import Path import aws_cdk as cdk from cdk.cdk_stack import CdkStack +from aws_cdk import DefaultStackSynthesizer +""" +NOTES + +This CDK app is expected to be executed from the `frameworks\cdk` folder. + +This module supports two execution modes. +1. ALL +Deploys all the rules in the rules directory +A CFT stack will be created/updated for each rule in the directory + +2. Specific Rules +Deploys the specified rules that were passed to the RDK CLI +A CFT stack will be created/updated for each specified rule +""" app = cdk.App() -CdkStack( - app, - "CdkStack", - # If you don't specify 'env', this stack will be environment-agnostic. - # Account/Region-dependent features and context lookups will not work, - # but a single synthesized template can be deployed anywhere. - # Uncomment the next line to specialize this stack for the AWS Account - # and Region that are implied by the current CLI configuration. - # env=cdk.Environment(account=os.getenv('CDK_DEFAULT_ACCOUNT'), region=os.getenv('CDK_DEFAULT_REGION')), - # Uncomment the next line if you know exactly what Account and Region you - # want to deploy the stack to. 
*/ - # env=cdk.Environment(account='123456789012', region='us-east-1'), - # For more information, see https://docs.aws.amazon.com/cdk/latest/guide/environments.html -) +rules_dir = Path(app.node.try_get_context("rules_dir")) +rulename_str: str = app.node.try_get_context("rulename") +if not rulename_str: + raise Exception("Need either --all or specific rule name(s).") +rule_names = rulename_str.split("|") # Assumes a pipe-delimited list of rulenames + +for rule_name in rule_names: + CdkStack( + scope=app, + construct_id=rule_name.replace("_", ""), + rule_name=rule_name, + rules_dir=rules_dir, + # Suppresses Bootstrap-related conditions and metadata + synthesizer=DefaultStackSynthesizer(generate_bootstrap_version_rule=False), + ) app.synth() diff --git a/rdk/frameworks/cdk/cdk/cdk_stack.py b/rdk/frameworks/cdk/cdk/cdk_stack.py index 6a41ecdc..31169695 100644 --- a/rdk/frameworks/cdk/cdk/cdk_stack.py +++ b/rdk/frameworks/cdk/cdk/cdk_stack.py @@ -25,82 +25,93 @@ class CdkStack(Stack): - def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: + def __init__( + self, + scope: Construct, + construct_id: str, + rules_dir: Path, + rule_name: str, + **kwargs, + ) -> None: super().__init__(scope, construct_id, **kwargs) - rules_dir = Path(self.node.try_get_context("rules_dir")) - rules_list = get_deploy_rules_list(rules_dir) + rule_name: str = rule_name + # TODO - this is a hacky approach to this + # TODO - find a way to determine whether we're in the parent rule directory or + # an individual rule's directory + if rules_dir.name != rule_name: + rule_path: Path = rules_dir.joinpath(rule_name) + else: + rule_path = rules_dir - for rule_path in rules_list: - rule_name = get_rule_name(rule_path) - rule_parameters = get_rule_parameters(rule_path) - print(f"Adding Rule {rule_name} ...") - if "SourceRuntime" in rule_parameters["Parameters"] and rule_parameters[ - "Parameters" - ]["SourceRuntime"] in ["cloudformation-guard2.0", "guard-2.x.x"]: - arg = CustomPolicy( - policy_text=rule_path.joinpath("rule_code.guard").read_text(), - rule_parameters=rule_parameters, - ) - config.CustomPolicy(self, rule_name, **asdict(arg)) - elif ( - "SourceRuntime" in rule_parameters["Parameters"] - and rule_parameters["Parameters"]["SourceRuntime"] - in rdk_supported_custom_rule_runtime - ): - # Lambda function containing logic that evaluates compliance with the rule.
- fn_arg = LambdaFunction( - code=lambda_.Code.from_asset(rule_path.as_posix()), - rule_parameters=rule_parameters, - ) - if "-lib" in rule_parameters["Parameters"]["SourceRuntime"]: - layer_version_arn = ( - fn_arg.get_latest_rdklib_lambda_layer_version_arn() - ) - latest_layer = lambda_.LayerVersion.from_layer_version_arn( - scope=self, - id="rdklayerversion", - layer_version_arn=layer_version_arn, - ) - # fn_arg.layers.append(latest_layer) - eval_compliance_fn = lambda_.Function( - self, - f"{rule_name}Function", - **asdict(fn_arg), - layers=[latest_layer], + rule_parameters = get_rule_parameters(rule_path) + if "SourceRuntime" in rule_parameters["Parameters"] and rule_parameters[ + "Parameters" + ]["SourceRuntime"] in ["cloudformation-guard2.0", "guard-2.x.x"]: + arg = CustomPolicy( + policy_text=rule_path.joinpath("rule_code.guard").read_text(), + rule_parameters=rule_parameters, + ) + config.CustomPolicy(self, rule_name, **asdict(arg)) + elif ( + "SourceRuntime" in rule_parameters["Parameters"] + and rule_parameters["Parameters"]["SourceRuntime"] + in rdk_supported_custom_rule_runtime + ): + # Lambda function containing logic that evaluates compliance with the rule. + fn_arg = LambdaFunction( + code=lambda_.Code.from_asset( + rule_path.as_posix(), exclude=["build"] + ), # Excluding build due to infinite loops + rule_parameters=rule_parameters, + ) + if "-lib" in rule_parameters["Parameters"]["SourceRuntime"]: + layer_version_arn = fn_arg.get_latest_rdklib_lambda_layer_version_arn() + latest_layer = lambda_.LayerVersion.from_layer_version_arn( + scope=self, + id="rdklayerversion", + layer_version_arn=layer_version_arn, ) + # fn_arg.layers.append(latest_layer) + eval_compliance_fn = lambda_.Function( + self, + f"{rule_name}Function", + **asdict(fn_arg), + layers=[latest_layer], + ) - # A custom rule that runs on configuration changes of EC2 instances - arg = CustomRule( - lambda_function=eval_compliance_fn, rule_parameters=rule_parameters - ) - config.CustomRule(self, rule_name, **asdict(arg)) - elif ( - "SourceIdentifier" in rule_parameters["Parameters"] - and rule_parameters["Parameters"]["SourceIdentifier"] - ): - arg = ManagedRule(rule_parameters=rule_parameters) - config.ManagedRule(self, rule_name, **asdict(arg)) - else: - print(f"Rule type not supported for Rule {rule_name}") - continue - # raise RdkRuleTypesInvalidError(f"Error loading parameters file for Rule {rule_name}") + # A custom rule that runs on configuration changes of EC2 instances + arg = CustomRule( + lambda_function=eval_compliance_fn, rule_parameters=rule_parameters + ) + config.CustomRule(self, rule_name, **asdict(arg)) + elif ( + "SourceIdentifier" in rule_parameters["Parameters"] + and rule_parameters["Parameters"]["SourceIdentifier"] + ): + arg = ManagedRule(rule_parameters=rule_parameters) + config.ManagedRule(self, rule_name, **asdict(arg)) + else: + print(f"Rule type not supported for Rule {rule_name}") + raise RdkRuleTypesInvalidError( + f"Error loading parameters file for Rule {rule_name}" + ) - if ( - "Remediation" in rule_parameters["Parameters"] - and rule_parameters["Parameters"]["Remediation"] - ): - arg = RemediationConfiguration(rule_parameters=rule_parameters) - config.CfnRemediationConfiguration( - self, f"{rule_name}RemediationConfiguration", **asdict(arg) - ) - # # A rule to detect stack drifts - # drift_rule = config.CloudFormationStackDriftDetectionCheck(self, "Drift") + if ( + "Remediation" in rule_parameters["Parameters"] + and rule_parameters["Parameters"]["Remediation"] + ): + arg = 
RemediationConfiguration(rule_parameters=rule_parameters) + config.CfnRemediationConfiguration( + self, f"{rule_name}RemediationConfiguration", **asdict(arg) + ) + # # A rule to detect stack drifts + # drift_rule = config.CloudFormationStackDriftDetectionCheck(self, "Drift") - # # Topic to which compliance notification events will be published - # compliance_topic = sns.Topic(self, "ComplianceTopic") + # # Topic to which compliance notification events will be published + # compliance_topic = sns.Topic(self, "ComplianceTopic") - # # Send notification on compliance change events - # drift_rule.on_compliance_change("ComplianceChange", - # target=targets.SnsTopic(compliance_topic) - # ) + # # Send notification on compliance change events + # drift_rule.on_compliance_change("ComplianceChange", + # target=targets.SnsTopic(compliance_topic) + # ) diff --git a/rdk/frameworks/cdk/cdk/core/rule_parameters.py b/rdk/frameworks/cdk/cdk/core/rule_parameters.py index 5f843ddc..bef7f5ad 100644 --- a/rdk/frameworks/cdk/cdk/core/rule_parameters.py +++ b/rdk/frameworks/cdk/cdk/core/rule_parameters.py @@ -1,4 +1,5 @@ import json +import os from pathlib import Path from aws_cdk import Stack diff --git a/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/parameters.json b/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/parameters.json new file mode 100644 index 00000000..0d0175ca --- /dev/null +++ b/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/AMI_DEPRECATED_CHECK/parameters.json @@ -0,0 +1,13 @@ +{ + "Version": "1.0", + "Parameters": { + "RuleName": "AMI_DEPRECATED_CHECK", + "Description": "AMI_DEPRECATED_CHECK", + "SourceRuntime": "python3.8-lib", + "CodeKey": "AMI_DEPRECATED_CHECK.zip", + "InputParameters": "{}", + "OptionalParameters": "{}", + "SourcePeriodic": "TwentyFour_Hours" + }, + "Tags": "[]" + } \ No newline at end of file diff --git a/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/parameters.json b/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/parameters.json new file mode 100644 index 00000000..fa47319b --- /dev/null +++ b/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/parameters.json @@ -0,0 +1,45 @@ +{ + "Version": "1.0", + "Parameters": { + "RuleName": "API_GATEWAY_PRIVATE", + "Description": "API_GATEWAY_PRIVATE", + "SourceRuntime": "guard-2.x.x", + "InputParameters": "{}", + "OptionalParameters": "{}", + "SourcePeriodic": "TwentyFour_Hours", + "Remediation": { + "Automatic": true, + "ConfigRuleName": "TEST_rule", + "MaximumAutomaticAttempts": "2", + "Parameters": { + "AutomationAssumeRole": { + "StaticValue": { + "Values": [ + { "Fn::Sub": "arn:aws:iam::${AWS::AccountId}:role/sns-access" } + ] + } + }, + "Message": { + "StaticValue": { + "Values": ["hi"] + } + }, + "TopicArn": { + "StaticValue": { + "Values": [ + { + "Fn::Sub": "arn:aws:sns:${AWS::Region}:${AWS::AccountId}:rules-notification" + } + ] + } + } + }, + "ResourceType": "AWS::EC2::Instance", + "RetryAttemptSeconds": "2", + "TargetId": "AWS-PublishSNSNotification", + "TargetType": "SSM_DOCUMENT", + "TargetVersion": "1" + } + }, + "Tags": "[]" +} diff --git a/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/rule_code.guard b/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/rule_code.guard new file mode 100644 index 00000000..f2654974 --- /dev/null +++ b/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/API_GATEWAY_PRIVATE/rule_code.guard @@ 
-0,0 +1,39 @@ +# +# Select all AWS::ApiGateway::RestApi resources +# present in the Resources section of the template. +# +let api_gws = Resources.*[ Type == 'AWS::ApiGateway::RestApi'] + +# +# Rule intent: +# 1) All AWS::ApiGateway::RestApi resources deployed must be private. +# 2) All AWS::ApiGateway::RestApi resources deployed must have at least one AWS Identity and Access Management (IAM) policy condition key to allow access from a VPC. +# +# Expectations: +# 1) SKIP when there are no AWS::ApiGateway::RestApi resources in the template. +# 2) PASS when: +# ALL AWS::ApiGateway::RestApi resources in the template have the EndpointConfiguration property set to Type: PRIVATE. +# ALL AWS::ApiGateway::RestApi resources in the template have one IAM condition key specified in the Policy property with aws:sourceVpc or :SourceVpc. +# 3) FAIL otherwise. +# +# + +rule check_rest_api_is_private when %api_gws !empty { + %api_gws { + Properties.EndpointConfiguration.Types[*] == "PRIVATE" + } +} + +rule check_rest_api_has_vpc_access when check_rest_api_is_private { + %api_gws { + Properties { + # + # ALL AWS::ApiGateway::RestApi resources in the template have one IAM condition key specified in the Policy property with + # aws:sourceVpc or :SourceVpc + # + some Policy.Statement[*] { + Condition.*[ keys == /aws:[sS]ource(Vpc|VPC|Vpce|VPCE)/ ] !empty + } + } + } +} \ No newline at end of file diff --git a/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/ENCRYPTED_VOLUMES/parameters.json b/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/ENCRYPTED_VOLUMES/parameters.json new file mode 100644 index 00000000..d179f358 --- /dev/null +++ b/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/ENCRYPTED_VOLUMES/parameters.json @@ -0,0 +1,15 @@ +{ + "Version": "1.0", + "Parameters": { + "RuleName": "ENCRYPTED_VOLUMES", + "SourceRuntime": null, + "CodeKey": null, + "InputParameters": "{}", + "OptionalParameters": "{}", + "SourceEvents": "AWS::EC2::Volume", + "SourceIdentifier": "ENCRYPTED_VOLUMES", + "RuleSets": [ + "baseline" + ] + } +} diff --git a/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED/parameters.json b/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED/parameters.json new file mode 100644 index 00000000..7a1d7bff --- /dev/null +++ b/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED/parameters.json @@ -0,0 +1,15 @@ +{ + "Version": "1.0", + "Parameters": { + "RuleName": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED", + "SourceRuntime": null, + "CodeKey": null, + "InputParameters": "{}", + "OptionalParameters": "{}", + "SourceEvents": "AWS::S3::Bucket", + "SourceIdentifier": "S3_BUCKET_SERVER_SIDE_ENCRYPTION_ENABLED", + "RuleSets": [ + "baseline" + ] + } +} diff --git a/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/SNS_TOPIC_IN_US_EAST_1_ONLY/parameters.json b/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/SNS_TOPIC_IN_US_EAST_1_ONLY/parameters.json new file mode 100644 index 00000000..8375592b --- /dev/null +++ b/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/SNS_TOPIC_IN_US_EAST_1_ONLY/parameters.json @@ -0,0 +1,12 @@ +{ + "Version": "1.0", + "Parameters": { + "RuleName": "SNS_TOPIC_IN_US_EAST_1_ONLY", + "Description": "SNS_TOPIC_IN_US_EAST_1_ONLY", + "SourceRuntime": "guard-2.x.x", + "InputParameters": "{}", + "OptionalParameters": "{}", + "SourcePeriodic": "TwentyFour_Hours" + }, + "Tags": "[]" + } \ No newline at end of file diff --git 
a/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/SNS_TOPIC_IN_US_EAST_1_ONLY/rule_code.guard b/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/SNS_TOPIC_IN_US_EAST_1_ONLY/rule_code.guard new file mode 100644 index 00000000..9ee40f78 --- /dev/null +++ b/rdk/frameworks/cdk/tests/integration/rdk-cdk-rule-dir/SNS_TOPIC_IN_US_EAST_1_ONLY/rule_code.guard @@ -0,0 +1,3 @@ +rule MyCustomPolicy when resourceType IN ['AWS::SNS::Topic'] { + awsRegion == "us-east-1" +} \ No newline at end of file diff --git a/rdk/runners/base.py b/rdk/runners/base.py index db6bdcf3..6577da94 100644 --- a/rdk/runners/base.py +++ b/rdk/runners/base.py @@ -82,6 +82,7 @@ def run_cmd( # pylint: disable=too-many-arguments,too-many-locals,too-many-stat # We're only dealing with text streams for now "universal_newlines": True, "shell": True, # Added to make this work for Windows environments + "encoding": "utf8", } if cwd: subprocess_popen_kwargs["cwd"] = cwd @@ -89,15 +90,13 @@ def run_cmd( # pylint: disable=too-many-arguments,too-many-locals,too-many-stat subprocess_popen_kwargs["env"] = env # Command output log handling flags - # By default, we log stderr as INFO, - # but relog it as ERROR upon a failure + # loglevel_stdout = logging.INFO - loglevel_stderr = logging.INFO + loglevel_stderr = logging.ERROR # What are we doing with outputs? - if capture_output or discard_output: + if discard_output: loglevel_stdout = logging.DEBUG - loglevel_stderr = logging.ERROR # Linter ignores: # * mypy is not happy about `subprocess_run_kwargs`, it thinks we are @@ -113,77 +112,51 @@ def run_cmd( # pylint: disable=too-many-arguments,too-many-locals,too-many-stat # Run try: - with subprocess.Popen(**subprocess_popen_kwargs) as process: # type: ignore[call-overload] # nosec - # Read stdout and stderr streams - selctr = selectors.DefaultSelector() - for _maybe_fileobj in [process.stdout, process.stderr]: - if _maybe_fileobj is not None: - selctr.register( - fileobj=_maybe_fileobj, events=selectors.EVENT_READ + process = subprocess.Popen(**subprocess_popen_kwargs) # type: ignore[call-overload] # nosec + + # Parse and log response from the process communicate() + def print_parsed_response(input_data: tuple): + for input_response in input_data: + if not input_response: + continue + # Prettify lines + _line_no_escapes = re.sub( + # Remove all escape sequences + # https://superuser.com/a/380778 + r"\x1b\[[0-9;]*[a-zA-Z]", + "", + input_response, + ) + _line_rstripped = _line_no_escapes.rstrip() + _line_stripped = _line_no_escapes.strip() + + if _line_stripped: # and capture_output + captured_stdout_lines.append(_line_stripped) + log_level = loglevel_stdout + if re.search("Error", input_response): + log_level = logging.ERROR + if _line_rstripped: + self.logger.log( + level=log_level, + msg=_line_rstripped, ) - def _log_streams(is_final: bool = False): - """ - Log stuff based on stdout or stderr. - """ - # Log streaming currently fails for Windows - # OSError: [WinError 10038] An operation was attempted on something that is not a socket - # Capture the error for now. 
- try: - selctr.select() - except Exception as ex: - return # TODO - actually implement this for Windows - for _selkey, _ in selctr.select(): - # NOTE: Selector key can be empty - if _selkey: - this_fileobj: TextIO = _selkey.fileobj # type: ignore - for _line in iter(this_fileobj.readline, ""): - # Fixup lines - _line_no_escapes = re.sub( - # Remove all escape sequences - # https://superuser.com/a/380778 - r"\x1b\[[0-9;]*[a-zA-Z]", - "", - _line, - ) - _line_rstripped = _line_no_escapes.rstrip() - _line_stripped = _line_no_escapes.strip() - - # Decide what to do with them ... - if this_fileobj is process.stdout: - # This line is a stdout - if capture_output and _line_stripped: - captured_stdout_lines.append(_line_stripped) - if _line_rstripped: - self.logger.log( - level=loglevel_stdout, - msg=_line_rstripped, - ) - if this_fileobj is process.stderr: - # This line is a stderr - if _line_stripped: - captured_stderr_lines.append(_line_stripped) - if _line_rstripped: - self.logger.log( - level=loglevel_stderr, - msg=_line_rstripped, - ) - - # If this is not the final call, iterate - # over each selector alternatively - if not is_final: - break - - # Process streams while the command is running - while process.poll() is None: - _log_streams() - - # Again, if stuff is leftover in the fd's - time.sleep(0.10) - _log_streams(is_final=True) - - # Get return code - return_code = process.returncode + def _log_streams(): + output = process.communicate() + print_parsed_response(output) + # print_parsed_response(stderr, loglevel_stderr) + return + + # Process streams while the command is running + while process.poll() is None: + _log_streams() + + # Again, if stuff is leftover in the file descriptors + # time.sleep(0.10) + # _log_streams() + + # Get return code + return_code = process.returncode except Exception as exc: self.logger.exception(exc) raise RdkCommandInvokeError("Failed to invoke requested command") from exc @@ -200,7 +173,7 @@ def _log_streams(is_final: bool = False): if capture_output: return "\n".join(captured_stdout_lines) - return "OK" + return return_code def get_python_executable(self) -> str: # pylint: disable=no-self-use """ diff --git a/rdk/runners/cdk.py b/rdk/runners/cdk.py index b481f310..9a0751d7 100644 --- a/rdk/runners/cdk.py +++ b/rdk/runners/cdk.py @@ -1,13 +1,22 @@ +import boto3 import copy import json import os +import re import shutil +import sys from dataclasses import dataclass, field from pathlib import Path from typing import Any, Dict, List, Optional +from botocore.exceptions import ClientError import rdk as this_pkg from rdk.runners.base import BaseRunner +from rdk.core.errors import ( + RdkCommandExecutionError, + RdkCommandInvokeError, + RdkCommandNotAllowedError, +) @dataclass @@ -25,18 +34,38 @@ class CdkRunner(BaseRunner): rules_dir: Path cdk_app_dir: Path = field(init=False) + rulenames: List[str] def __post_init__(self): super().__post_init__() - # cdk_source_dir = Path(__file__).resolve().parent.parent /'frameworks' / 'cdk' - # self.logger.info("Getting latest deployment framework from " + cdk_source_dir.as_posix()) - # self.logger.info("Deploying latest deployment framework in " + self.root_module.as_posix()) - # shutil.rmtree(self.root_module / "cdk") - # shutil.copytree(Path(__file__).resolve().parent.parent /'frameworks' / 'cdk', self.root_module / 'cdk') - # self.cdk_app_dir = self.root_module / "cdk" - # TODO - should this actually be the CDK application's path? I don't understand what Ricky was doing here. 
+ # The CDK app is a standard app that takes rule definitions as context self.cdk_app_dir = Path(__file__).resolve().parent.parent / "frameworks" / "cdk" + def get_context_args( + self, + # Rule names generally aren't included for CDK Diff + include_rulenames=True, + # To override the list of rulenames, eg. for only deploying a subset that need changes + rulenames_override=[], + ): + context_args = [ + "--version-reporting", # Setting this to false will exclude CDK Metadata + "false", + "--context", + "rules_dir=" + self.rules_dir.as_posix(), + "--context", + ] + + if include_rulenames: + if rulenames_override: + deploy_rulenames = rulenames_override + else: + deploy_rulenames = self.rulenames + context_args.append( + f"\"rulename={'|'.join(deploy_rulenames)}\"", + ) + return context_args + def synthesize(self): """ Executes `cdk synth`. @@ -47,54 +76,94 @@ def synthesize(self): cmd = [ "cdk", "synth", + "--quiet", # TODO - does it make sense to not write to cdk.out? "--validation", - "--output", - self.rules_dir.joinpath("build/").as_posix(), - # "--version-reporting", - # "false", - # "--path-metadata", - # "false", - # "--asset-metadata", - # "false", - "--context", - "rules_dir=" + self.rules_dir.as_posix(), + # "--output", + # self.rules_dir.joinpath("build/").as_posix(), ] + cmd += self.get_context_args() self.logger.info("Synthesizing CloudFormation template(s)...") + self.logger.debug(f"Running cmd {cmd} in directory {self.cdk_app_dir.as_posix()}...") + self.run_cmd( cmd=cmd, cwd=self.cdk_app_dir.as_posix(), allowed_return_codes=[0, 2], ) - def diff(self): + def get_deployed_stacks(self): """ - Executes `cdk diff`. - - Parameters: + This is used to determine which stacks need to be CDK Diff'd vs which are fresh deploys + """ + rule_names = self.rulenames + existing_stack_rule_names = {} + missing_stack_rule_names = {} + for rule_name in rule_names: + try: + existing_stack_rule_names[rule_name] = ( + boto3.client("cloudformation") + .describe_stacks(StackName=rule_name.replace("_", "")) + .get("Stacks", [])[0] + .get("StackName") + ) + except ClientError as ex: + self.logger.error(repr(ex)) + # Continue if stack is not found + if re.search("(ValidationError)", repr(ex)): + self.logger.info(f"Stack {rule_name.replace('_', '')} not found, adding to missing stack list") + missing_stack_rule_names[rule_name] = rule_name.replace("_", "") + continue + raise RdkCommandExecutionError("Unable to determine if stack exists.") + return existing_stack_rule_names, missing_stack_rule_names + def diff(self): """ - cmd = [ - "cdk", - "diff", - "--context", - "rules_dir=" + self.rules_dir.as_posix(), - ] + Executes `cdk diff`. - self.logger.info( - f"Showing differences on CloudFormation template(s) for rule {self.rules_dir.as_posix()}..." - ) + The intention of this execution is to determine whether a full run of RDK deploy is required. - self.logger.info( - f"Running cmd {cmd} in directory {self.cdk_app_dir.as_posix()}..." - ) + If a stack has no differences compared to the deployed stack, it should be skipped. Otherwise, it should be redeployed. 
- self.run_cmd( - cmd=cmd, - cwd=self.cdk_app_dir.as_posix(), - allowed_return_codes=[0, 2], - ) + Parameters: + None + """ + stacks_with_diffs = [] # Keep a list of which stacks need to be updated + deployed_stacks, missing_stacks = self.get_deployed_stacks() + if not deployed_stacks and not missing_stacks: + self.logger.info("No stacks requiring updates found for the given inputs.") + return + # Always deploy stacks if there is no existing Stack + for missing_stack in missing_stacks.keys(): + stacks_with_diffs.append(missing_stack) + # Review each deployed stack and compare it to the current template + for deployed_stack in deployed_stacks.keys(): + cmd = [ + "cdk", + "diff", + "--fail", + deployed_stacks[ + deployed_stack + ], # Use the map of rule name to stack name, since they're used in different but related contexts + ] + context = self.get_context_args(include_rulenames=False) + context.append(f"rulename={deployed_stack}") + cmd += context + + self.logger.info(f"Showing differences on CloudFormation template(s) {deployed_stack}...") + + self.logger.debug(f"Running cmd {cmd} in directory {self.cdk_app_dir.as_posix()}...") + + return_code = self.run_cmd( + cmd=cmd, + cwd=self.cdk_app_dir.as_posix(), + allowed_return_codes=[0, 1, 2], + ) + if return_code == 1: + stacks_with_diffs.append(deployed_stack) + # Send a list of stack names to the caller + return stacks_with_diffs def bootstrap(self): """ @@ -106,9 +175,8 @@ def bootstrap(self): cmd = [ "cdk", "bootstrap", - "--context", - "rules_dir=" + self.rules_dir.as_posix(), ] + cmd += self.get_context_args() self.logger.info("Environment Bootstrapping ...") @@ -118,26 +186,30 @@ def bootstrap(self): allowed_return_codes=[0, 2], ) - def deploy(self): + def deploy(self, stacks_to_deploy): """ Executes `cdk deploy`. Parameters: - + stacks_to_deploy: A list of stack names to deploy. 
""" cmd = [ "cdk", "deploy", + " ".join(stacks_to_deploy).replace("_", ""), "--app", (self.cdk_app_dir / "cdk.out").as_posix(), - "--context", - "rules_dir=" + self.rules_dir.as_posix(), "--require-approval", "never", ] + cmd += self.get_context_args( + rulenames_override=stacks_to_deploy, + ) self.logger.info("Deploying AWS Config Rules ...") + self.logger.debug(f"Running cmd {cmd} in directory {self.cdk_app_dir.as_posix()}...") + self.run_cmd( cmd=cmd, cwd=self.cdk_app_dir.as_posix(), @@ -154,13 +226,14 @@ def destroy(self): cmd = [ "cdk", "destroy", - "--context", - "rules_dir=" + self.rules_dir.as_posix(), "--force", ] + cmd += self.get_context_args() self.logger.info("Destroying AWS Config Rules ...") + self.logger.debug(f"Running cmd {cmd} in directory {self.cdk_app_dir.as_posix()}...") + self.run_cmd( cmd=cmd, cwd=self.cdk_app_dir.as_posix(), diff --git a/setup.py b/setup.py deleted file mode 100644 index 062b8b23..00000000 --- a/setup.py +++ /dev/null @@ -1,43 +0,0 @@ -from setuptools import find_packages, setup - -import rdk as this_pkg - -setup( - name=this_pkg.DIST_NAME, - version=this_pkg.VERSION, - description=this_pkg.DESCRIPTION, - long_description=this_pkg.DESCRIPTION, - long_description_content_type="text/plain", - url=this_pkg.URL, - license="Apache-2.0", - author=this_pkg.MAINTAINER, - author_email=this_pkg.MAINTAINER_EMAIL, - maintainer=this_pkg.MAINTAINER, - maintainer_email=this_pkg.MAINTAINER_EMAIL, - python_requires=">=3.8", - zip_safe=False, - packages=find_packages(include=[f"{this_pkg.NAME}", f"{this_pkg.NAME}.*"]), - package_data={ - this_pkg.NAME: ["py.typed"], - }, - install_requires=[ - "aiofiles<1", - # "aws-cdk<2", - "aws-cdk-lib>=2", - "constructs>=10,<11", - # "boto3>=1,<2", - # "c1-p13rlib>=2,<3", - # "c7n", - "colorlog>=4,<5", - "httpx<1", - "mergedeep>=1,<2", - "pytest>=6,<7", - "semver>=2,<3", - ], - entry_points={ - "console_scripts": [ - f"{this_pkg.CLI_NAME}={this_pkg.NAME}.cli.main:main", - ], - "pytest11": [f"pytest_{this_pkg.NAME}={this_pkg.NAME}.pytest.fixture"], - }, -) diff --git a/sonar-project.properties b/sonar-project.properties deleted file mode 100644 index 4ce7d7c9..00000000 --- a/sonar-project.properties +++ /dev/null @@ -1,38 +0,0 @@ -################################################################################ -# SONAR ANALYSIS PARAMETERS -################################################################################ - -# -# Reference: -# https://docs.sonarqube.org/7.9/analysis/analysis-parameters/ -# https://docs.sonarqube.org/7.9/analysis/coverage/ -# https://docs.sonarqube.org/7.9/analysis/external-issues/ -# https://docs.sonarqube.org/7.9/analysis/languages/python/ -# https://docs.sonarqube.org/7.9/analysis/scan/sonarscanner/ -# https://docs.sonarqube.org/7.9/branches/overview/ -# - -# Server Configs -sonar.host.url=https://sonar.cloud.example.com/ - -# Project Configs -sonar.projectKey=rdk -sonar.projectName=rdk -sonar.projectVersion=latest -sonar.projectDescription=CLI to Test Cloud Custodian Policies -sonar.links.homepage=https://github.com/awslabs/aws-config-rdk - -# Repo Configs -sonar.sourceEncoding=UTF-8 -sonar.scm.provider=git - -# Paths -sonar.tests=tests/unit - -# Reports -sonar.python.bandit.reportPaths=.reports/bandit.json -sonar.python.coverage.reportPaths=.reports/coverage.xml -sonar.python.pylint.reportPath=.reports/pylint-sonar.txt -sonar.python.xunit.reportPath=.reports/junit.xml - -################################################################################ diff --git a/tools/ci/bin/init-snapshot.sh 
b/tools/ci/bin/init-snapshot.sh deleted file mode 100755 index 802e05eb..00000000 --- a/tools/ci/bin/init-snapshot.sh +++ /dev/null @@ -1,83 +0,0 @@ -#!/usr/bin/env bash - -################################################################################ -# MAKE HELPER TO CHECK WHETHER `make init` NEEDS TO RUN -################################################################################ - -# Bash Option -set -e -set -o pipefail -export IFS=$'\n' - -# err -function _kaput() { - echo "$@" >&2 - exit 1 -} - -# Skip if we are running on jenkins -# jenkins/bogie has a really old version of pipenv that -# is not init-snapshot compatible. -# Also, on jenkins, we're always starting with a fresh init anyways -if test -n "${BUILD_URL}" || [[ "${CI}" == "Jenkins" ]]; then - exit 0 -fi - -# Read operation -declare operation="check" -if [[ $# -gt 0 ]]; then - if [[ "${1}" == "save" ]]; then - operation="save" - fi -fi - -# get reporoot -declare reporoot -reporoot=$(git rev-parse --show-toplevel) \ - || _kaput "Failed to get repository root" - -# Things to include in snapshot -declare -a snapshot_things -for _file in \ - "${reporoot}/Makefile" \ - "${reporoot}/requirements.txt" \ - "${reporoot}/Pipfile.lock" \ - "${reporoot}/.python-version"; do - if test -f "${_file}"; then - snapshot_things+=("${_file}") - fi -done -while IFS= read -r -d '' _file; do - snapshot_things+=("${_file}") -done < <(find "${reporoot}/tools/githooks" -type f -print0) - -# Calculate snapshot -declare snapshot -snapshot=$( - cat "${snapshot_things[@]}" \ - | openssl dgst -sha256 -) || _kaput "ERROR: Failed to calculate snapshot" - -# snapshot location -declare snapshot_root="${reporoot}/.venv" -declare snapshot_file="${snapshot_root}/init-snapshot" - -# create/save -if [[ "${operation}" == "save" ]]; then - # create - mkdir -p "${snapshot_root}" \ - && echo "${snapshot}" > "${snapshot_file}" -else - # check - test -f "${snapshot_file}" || exit 2 - if [[ $(head -1 "${snapshot_file}") == "${snapshot}" ]]; then - exit 0 - else - exit 2 - fi -fi - -# Done -exit 0 - -################################################################################ diff --git a/tools/ci/bin/install-tools-on-jenkins.sh b/tools/ci/bin/install-tools-on-jenkins.sh deleted file mode 100644 index 19eccbd0..00000000 --- a/tools/ci/bin/install-tools-on-jenkins.sh +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env bash - -################################################################################ -# INSTALL TOOLS NEEDED FOR JENKINS JOBS -################################################################################ - -# Bash Options -set -e -set -o pipefail -export IFS=$'\n' - -# err -function _kaput() { - echo "$@" >&2 - exit 1 -} - -# Ensure we're running on Jenkins -if test -z "${BUILD_URL}"; then - _kaput "ERROR: This script should only run within a Jenkins job" -fi - -# Setup ~/.local -declare home_local="/home/git/.local" -if test -e "${home_local}"; then - chmod -R 0755 "${home_local}" - rm -rf "${home_local}" -fi -mkdir -p "${home_local}/bin" "${home_local}/lib" - -# Install pipenv -declare PYENV_VERSION -PYENV_VERSION=$(pyenv versions --bare | grep 3.8 | sort -n | tail -1) -export PYENV_VERSION -pip3 install \ - --index-url https://artifactory.cloud.example.com/artifactory/api/pypi/pypi-internalfacing/simple \ - --upgrade \ - --ignore-installed \ - --user \ - -- pipenv - -# Verify pipenv -hash -r -"${home_local}/bin/pipenv" --version - -# Done -exit 0 - -################################################################################ diff --git 
a/tools/docs/bin/generate-ref-cli.sh b/tools/docs/bin/generate-ref-cli.sh deleted file mode 100644 index 7248dea7..00000000 --- a/tools/docs/bin/generate-ref-cli.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env bash - -################################################################################ -# GENERATE CLI REFERENCE -################################################################################ - -# Bash Options -set -e -set -o pipefail - -declare content_md="docs/reference/cli.md" - -# Init -echo > "${content_md}" - -# rdk -cat << '_EO_SECTION_START' >> "${content_md}" -## `rdk` - -```text -_EO_SECTION_START -rdk --help >> "${content_md}" 2>&1 -cat << '_EO_SECTION_END' >> "${content_md}" -``` - -_EO_SECTION_END - -# rdk-init -cat << '_EO_SECTION_START' >> "${content_md}" -## `rdk init` - -```text -_EO_SECTION_START -rdk init --help >> "${content_md}" 2>&1 -cat << '_EO_SECTION_END' >> "${content_md}" -``` - -_EO_SECTION_END - -# rdk-deploy -cat << '_EO_SECTION_START' >> "${content_md}" -## `rdk deploy` - -```text -_EO_SECTION_START -rdk test --help >> "${content_md}" 2>&1 -cat << '_EO_SECTION_END' >> "${content_md}" -``` -_EO_SECTION_END - -# Done -exit 0 - -################################################################################ diff --git a/tools/githooks/bin/commit-msg b/tools/githooks/bin/commit-msg deleted file mode 100755 index d7ed122f..00000000 --- a/tools/githooks/bin/commit-msg +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env bash - -############################################################################### -# commit-msg HOOK -############################################################################### - -# -# Based on: -# https://github.com/git/git/blob/master/templates/hooks--commit-msg.sample -# - -# Bash Options -set -eu -set -o pipefail - -# Read Arguments -declare commit_msg_file="${1}" - -# Do we have pipenv? -command -v "pipenv" > /dev/null 2>&1 \ - || exit 0 - -# Lint message -if ! pipenv run -- gitlint --staged --msg-filename "${commit_msg_file}"; then - cat << 'EO_BAD_COMMIT_MSG' - -The commit message provided is not compliant with Conventional Commits. - -For more information on writing commit messages, please review: -https://www.conventionalcommits.org/ - -To bypass this verification intentionally, you can use the `--no-verify` -option. 
For e.g., `git commit --no-verify -m "some commit message"` -EO_BAD_COMMIT_MSG - exit 1 -fi - -# Done -exit 0 - -############################################################################### diff --git a/tools/githooks/bin/pre-push b/tools/githooks/bin/pre-push deleted file mode 100755 index 4ab8ed09..00000000 --- a/tools/githooks/bin/pre-push +++ /dev/null @@ -1,77 +0,0 @@ -#!/usr/bin/env bash - -############################################################################### -# pre-push HOOK -############################################################################### - -# -# Based on: -# https://github.com/git/git/blob/master/templates/hooks--pre-push.sample -# - -# Bash Options -set -eu -set -o pipefail - -# Read arguments -declare remote_name="${1}" -declare remote_url="${2}" - -# Zero hash -declare zero -zero=$( - git hash-object --stdin < /dev/null \ - | tr "0-9a-z" "0" -) || exit 0 - -# Parse STDIN -while read -r local_ref local_oid remote_ref remote_oid; do - if [[ "${local_oid}" = "${zero}" ]]; then - # Handle delete - # Nothing to do - continue - fi - - # get commit range - range="" - - if [[ "${remote_oid}" = "${zero}" ]]; then - # New branch, examine all new commits - range="${remote_name}..${local_oid}" - else - # Update to existing branch, examine new commits - range="${remote_oid}..${local_oid}" - fi - - # lint commits - if [[ "${range}" != "" ]]; then - if command -v "pipenv" > /dev/null 2>&1; then - if ! pipenv run -- gitlint --ignore-stdin --commits "${range}"; then - - cat << 'EO_BAD_COMMIT_MSG' - -Commits being pushed are not compliant with Conventional Commits. - -For more information on writing commit messages, please review: -https://www.conventionalcommits.org/ - -To fix your commit messages, you can do one of the following: -* Use `git commit --ammend` if you have only one commit to fix -* Use `git rebase -i HEAD~n` to fix the last n commits. In the - interactive rebase session, you can choose `edit` to fix each - non-compliant commit. - -To bypass this verification intentionally, you can use the `--no-verify` -option. For e.g., `git push --no-verify` -EO_BAD_COMMIT_MSG - - exit 1 - fi - fi - fi -done - -# Done -exit 0 - -############################################################################### diff --git a/tools/githooks/etc/commit-template b/tools/githooks/etc/commit-template deleted file mode 100644 index 2173b593..00000000 --- a/tools/githooks/etc/commit-template +++ /dev/null @@ -1,32 +0,0 @@ - -# This repository requires that commit messages adhere to -# Conventional Commits (https://www.conventionalcommits.org/) - -# The general format of a commit message is: - -# [optional scope]: - -# [optional body] - -# [optional footer(s)] - -# For example: - -# feat: my shiny new feature - -# Lots of details here about my new feature - -# Refs: FOUNDRY-1234 - -# List of commonly used _types_ and their descriptions: - -# fix: A bugfix -# feat: A new feature -# chore: Routine maintenance -# docs: Documentation updates -# style: Formatting changes -# refactor: API Refactor. 
This indicates a breaking change -# test: Updates to test cases -# revert: Previous commits being reverted -# ci: Changes to CI configurations -# build: Release activity diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 66a14670..00000000 --- a/tox.ini +++ /dev/null @@ -1,2 +0,0 @@ -[flake8] -max-line-length=140 \ No newline at end of file diff --git a/twine.pypirc b/twine.pypirc deleted file mode 100644 index 4f3ddb01..00000000 --- a/twine.pypirc +++ /dev/null @@ -1,17 +0,0 @@ -############################################################################### -# .pypirc USED BY twine -############################################################################### - -# -# Reference: -# https://packaging.python.org/specifications/pypirc/ -# https://www.jfrog.com/confluence/display/JFROG/PyPI+Repositories#PyPIRepositories-PublishingtoArtifactory -# - -[distutils] -index-servers = artifactory - -[artifactory] -repository = https://artifactory.cloud.example.com/artifactory/api/pypi/pypi-internalfacing - -############################################################################### From 64d8bd1f00502fad7c5452bfbc8562f0df8f4d4a Mon Sep 17 00:00:00 2001 From: Benjamin Morris <93620006+bmorrissirromb@users.noreply.github.com> Date: Wed, 21 Jun 2023 15:52:17 -0700 Subject: [PATCH 20/23] Delete README.rst --- README.rst | 391 ----------------------------------------------------- 1 file changed, 391 deletions(-) delete mode 100644 README.rst diff --git a/README.rst b/README.rst deleted file mode 100644 index 63e073de..00000000 --- a/README.rst +++ /dev/null @@ -1,391 +0,0 @@ -rdk -=== -|pypibadge| |downloadsbadge| - - -.. |pypibadge| image:: https://static.pepy.tech/personalized-badge/rdk?period=total&units=international_system&left_color=black&right_color=blue&left_text=downloads - :target: https://pepy.tech/project/rdk -.. |downloadsbadge| image:: https://img.shields.io/pypi/v/rdk - :alt: PyPI - - - -Rule Development Kit - -We greatly appreciate feedback and bug reports at rdk-maintainers@amazon.com! You may also create an issue on this repo. - -The RDK is designed to support a "Compliance-as-Code" workflow that is intuitive and productive. It abstracts away much of the undifferentiated heavy lifting associated with deploying AWS Config rules backed by custom lambda functions, and provides a streamlined develop-deploy-monitor iterative process. - -For complete documentation, including command reference, check out the `ReadTheDocs documentation `_. - -Getting Started -=============== -Uses python 3.7/3.8/3.9 and is installed via pip. Requires you to have an AWS account and sufficient permissions to manage the Config service, and to create S3 Buckets, Roles, and Lambda Functions. An AWS IAM Policy Document that describes the minimum necessary permissions can be found at ``policy/rdk-minimum-permissions.json``. - -Under the hood, rdk uses boto3 to make API calls to AWS, so you can set your credentials any way that boto3 recognizes (options 3 through 8 `here `_) or pass them in with the command-line parameters ``--profile``, ``--region``, ``--access-key-id``, or ``--secret-access-key`` - -If you just want to use the RDK, go ahead and install it using pip. - -:: - -$ pip install rdk - -Alternately, if you want to see the code and/or contribute you can clone the git repo, and then from the repo directory use pip to install the package. Use the ``-e`` flag to generate symlinks so that any edits you make will be reflected when you run the installed package. 
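To make the credential flags described a few paragraphs above more concrete, here is a minimal sketch of how such options typically map onto a boto3 session; the helper name and its defaults are illustrative, not part of rdk's internal API::

    import boto3

    def build_session(profile=None, region=None, access_key_id=None, secret_access_key=None):
        # Anything left as None falls back to boto3's normal provider chain
        # (environment variables, shared credentials file, instance profile, ...).
        return boto3.Session(
            profile_name=profile,
            region_name=region,
            aws_access_key_id=access_key_id,
            aws_secret_access_key=secret_access_key,
        )

    # Roughly the equivalent of passing `--profile dev --region us-east-1`
    config_client = build_session(profile="dev", region="us-east-1").client("config")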
- -If you are going to author your Lambda functions using Java you will need to have Java 8 and gradle installed. If you are going to author your Lambda functions in C# you will need to have the dotnet CLI and the .NET Core Runtime 1.08 installed. - -:: - - $ pip install -e . - -To make sure the rdk is installed correctly, running the package from the command line without any arguments should display help information. - -:: - - $ rdk - usage: rdk [-h] [-p PROFILE] [-k ACCESS_KEY_ID] [-s SECRET_ACCESS_KEY] - [-r REGION] [-f REGION_FILE] [--region-set REGION_SET] - [-v] ... - rdk: error: the following arguments are required: , - - -Usage -===== - -Configure your env ------------------- -To use the RDK, it's recommended to create a directory that will be your working directory. This should be committed to a source code repo, and ideally created as a python virtualenv. In that directory, run the ``init`` command to set up your AWS Config environment. - -:: - - $ rdk init - Running init! - Creating Config bucket config-bucket-780784666283 - Creating IAM role config-role - Waiting for IAM role to propagate - Config Service is ON - Config setup complete. - Creating Code bucket config-rule-code-bucket-780784666283ap-southeast-1 - -Running ``init`` subsequent times will validate your AWS Config setup and re-create any S3 buckets or IAM resources that are needed. - -- If you have config delivery bucket already present in some other AWS account then use ``--config-bucket-exists-in-another-account`` as argument. - -:: - - $ rdk init --config-bucket-exists-in-another-account -- If you have AWS Organizations/ControlTower Setup in your AWS environment then additionally, use ``--control-tower`` as argument. - -:: - - $ rdk init --control-tower --config-bucket-exists-in-another-account -- If bucket for custom lambda code is already present in current account then use ``--skip-code-bucket-creation`` argument. - -:: - - $ rdk init --skip-code-bucket-creation - -- If you want rdk to create/update and upload the rdklib-layer for you, then use ``--generate-lambda-layer`` argument. In supported regions, rdk will deploy the layer using the Serverless Application Repository, otherwise it will build a local lambda layer archive and upload it for use. - -:: - - $ rdk init --generate-lambda-layer -- If you want rdk to give a custom name to the lambda layer for you, then use ``--custom-layer-namer`` argument. The Serverless Application Repository currently cannot be used for custom lambda layers. - -:: - - $ rdk init --generate-lambda-layer --custom-layer-name - -Create Rules ------------- -In your working directory, use the ``create`` command to start creating a new custom rule. You must specify the runtime for the lambda function that will back the Rule, and you can also specify a resource type (or comma-separated list of types) that the Rule will evaluate or a maximum frequency for a periodic rule. This will add a new directory for the rule and populate it with several files, including a skeleton of your Lambda code. - -:: - - $ rdk create MyRule --runtime python3.8 --resource-types AWS::EC2::Instance --input-parameters '{"desiredInstanceType":"t2.micro"}' - Running create! - Local Rule files created. - -On Windows it is necessary to escape the double-quotes when specifying input parameters, so the ``--input-parameters`` argument would instead look something like this:: - - '{\"desiredInstanceType\":\"t2.micro\"}' - -Note that you can create rules that use EITHER resource-types OR maximum-frequency, but not both. 
We have found that rules that try to be both event-triggered as well as periodic wind up being very complicated and so we do not recommend it as a best practice. - -Edit Rules Locally ---------------------------- -Once you have created the rule, edit the python file in your rule directory (in the above example it would be ``MyRule/MyRule.py``, but may be deeper into the rule directory tree depending on your chosen Lambda runtime) to add whatever logic your Rule requires in the ``evaluate_compliance`` function. You will have access to the CI that was sent by Config, as well as any parameters configured for the Config Rule. Your function should return either a simple compliance status (one of ``COMPLIANT``, ``NON_COMPLIANT``, or ``NOT_APPLICABLE``), or if you're using the python or node runtimes you can return a JSON object with multiple evaluation responses that the RDK will send back to AWS Config. An example would look like - -:: - - for sg in response['SecurityGroups']: - evaluations.append( - { - 'ComplianceResourceType': 'AWS::EC2::SecurityGroup', - 'ComplianceResourceId': sg['GroupId'], - 'ComplianceType': 'COMPLIANT', - 'Annotation': 'This is an important note.', - 'OrderingTimestamp': str(datetime.datetime.now()) - }) - - - return evaluations - -This is necessary for periodic rules that are not triggered by any CI change (which means the CI that is passed in will be null), and also for attaching annotations to your evaluation results. - -If you want to see what the JSON structure of a CI looks like for creating your logic, you can use - -:: - -$ rdk sample-ci - -to output a formatted JSON document. - -Write and Run Unit Tests ------------------------- -If you are writing Config Rules using either of the Python runtimes there will be a ``_test.py`` file deployed along with your Lambda function skeleton. This can be used to write unit tests according to the standard Python unittest framework (documented `here `_), which can be run using the ``test-local`` rdk command:: - - $ rdk test-local MyTestRule - Running local test! - Testing MyTestRule - Looking for tests in /Users/mborch/Code/rdk-dev/MyTestRule - - --------------------------------------------------------------------- - - Ran 0 tests in 0.000s - - OK - - -The test file includes setup for the MagicMock library that can be used to stub boto3 API calls if your rule logic will involve making API calls to gather additional information about your AWS environment. For some tips on how to do this, check out this blog post: https://sgillies.net/2017/10/19/mock-is-magic.html - -Modify Rule ------------ -If you need to change the parameters of a Config rule in your working directory you can use the ``modify`` command. Any parameters you specify will overwrite existing values, any that you do not specify will not be changed. - -:: - - $ rdk modify MyRule --runtime python3.9 --maximum-frequency TwentyFour_Hours --input-parameters '{"desiredInstanceType":"t2.micro"}' - Running modify! - Modified Rule 'MyRule'. Use the `deploy` command to push your changes to AWS. - -Again, on Windows the input parameters would look like:: - - '{\"desiredInstanceType\":\"t2.micro\"}' - -It is worth noting that until you actually call the ``deploy`` command your rule only exists in your working directory, none of the Rule commands discussed thus far actually makes changes to your account. 
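That last point follows from how rule metadata is stored: commands such as ``create`` and ``modify`` only rewrite local files, most notably the rule's ``parameters.json``. A rough sketch of that local-only effect (the helper below is illustrative; the field names follow the sample parameter files elsewhere in this patch)::

    import json
    from pathlib import Path

    def modify_rule_locally(rule_dir: Path, **overrides):
        # Read the rule's local metadata, apply the requested overrides, write it back.
        # Nothing here talks to AWS; that only happens on `deploy`.
        params_file = rule_dir / "parameters.json"
        params = json.loads(params_file.read_text())
        params["Parameters"].update(overrides)
        params_file.write_text(json.dumps(params, indent=2))

    # e.g. roughly the local effect of `rdk modify MyRule --maximum-frequency TwentyFour_Hours`
    # modify_rule_locally(Path("MyRule"), SourcePeriodic="TwentyFour_Hours")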
- -Deploy Rule ------------ -Once you have completed your compliance validation code and set your Rule's configuration, you can deploy the Rule to your account using the ``deploy`` command. This will zip up your code (and the other associated code files, if any) into a deployable package (or run a gradle build if you have selected the java8 runtime or run the lambda packaging step from the dotnet CLI if you have selected the dotnetcore1.0 runtime), copy that zip file to S3, and then launch or update a CloudFormation stack that defines your Config Rule, Lambda function, and the necessary permissions and IAM Roles for it to function. Since CloudFormation does not deeply inspect Lambda code objects in S3 to construct its changeset, the ``deploy`` command will also directly update the Lambda function for any subsequent deployments to make sure code changes are propagated correctly. - -:: - - $ rdk deploy MyRule - Running deploy! - Zipping MyRule - Uploading MyRule - Creating CloudFormation Stack for MyRule - Waiting for CloudFormation stack operation to complete... - ... - Waiting for CloudFormation stack operation to complete... - Config deploy complete. - -The exact output will vary depending on Lambda runtime. You can use the ``--all`` flag to deploy all of the rules in your working directory. If you used the ``--generate-lambda-layer`` flag in rdk init, use the ``--generated-lambda-layer`` flag for rdk deploy. - -Deploy Organization Rule ------------------------- -You can also deploy the Rule to your AWS Organization using the ``deploy-organization`` command. -For successful evaluation of custom rules in child accounts, please make sure you do one of the following: - -1. Set ASSUME_ROLE_MODE in Lambda code to True, to get the lambda to assume the Role attached on the Config Service and confirm that the role trusts the master account where the Lambda function is going to be deployed. -2. Set ASSUME_ROLE_MODE in Lambda code to True, to get the lambda to assume a custom role and define an optional parameter with key as ExecutionRoleName and set the value to your custom role name; confirm that the role trusts the master account of the organization where the Lambda function will be deployed. - -:: - - $ rdk deploy-organization MyRule - Running deploy! - Zipping MyRule - Uploading MyRule - Creating CloudFormation Stack for MyRule - Waiting for CloudFormation stack operation to complete... - ... - Waiting for CloudFormation stack operation to complete... - Config deploy complete. - -The exact output will vary depending on Lambda runtime. You can use the ``--all`` flag to deploy all of the rules in your working directory. -This command uses ``PutOrganizationConfigRule`` API for the rule deployment. If a new account joins an organization, the rule is deployed to that account. When an account leaves an organization, the rule is removed. Deployment of existing organizational AWS Config Rules will only be retried for 7 hours after an account is added to your organization if a recorder is not available. You are expected to create a recorder if one doesn't exist within 7 hours of adding an account to your organization. - -View Logs For Deployed Rule ---------------------------- -Once the Rule has been deployed to AWS you can get the CloudWatch logs associated with your lambda function using the ``logs`` command. 
- -:: - - $ rdk logs MyRule -n 5 - 2017-11-15 22:59:33 - START RequestId: 96e7639a-ca15-11e7-95a2-b1521890638d Version: $LATEST - 2017-11-15 23:41:13 - REPORT RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda Duration: 0.50 ms Billed Duration: 100 ms Memory Size: 256 MB - Max Memory Used: 36 MB - 2017-11-15 23:41:13 - END RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda - 2017-11-15 23:41:13 - Default RDK utility class does not yet support Scheduled Notifications. - 2017-11-15 23:41:13 - START RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda Version: $LATEST - -You can use the ``-n`` and ``-f`` command line flags just like the UNIX ``tail`` command to view a larger number of log events and to continuously poll for new events. The latter option can be useful in conjunction with manually initiating Config Evaluations for your deploy Config Rule to make sure it is behaving as expected. - - - -Running the tests -================= - -The ``testing`` directory contains scripts and buildspec files that I use to run basic functionality tests across a variety of CLI environments (currently Ubuntu linux running python 3.7/3.8/3.9, and Windows Server running python3.9). If there is interest I can release a CloudFormation template that could be used to build the test environment, let me know if this is something you want! - - -Advanced Features -================= -Cross-Account Deployments -------------------------- -Features have been added to the RDK to facilitate the cross-account deployment pattern that enterprise customers have standardized for custom Config Rules. A cross-account architecture is one in which the Lambda functions are deployed to a single central "Compliance" account (which may be the same as a central "Security" account), and the Config Rules are deployed to any number of "Satellite" accounts that are used by other teams or departments. This gives the compliance team confidence that their rule logic cannot be tampered with and makes it much easier for them to modify rule logic without having to go through a complex deployment process to potentially hundreds of AWS accounts. The cross-account pattern uses two advanced RDK features - -- Functions-only deployment -- create-rule-template command - -**Functions-Only Deployment** - -By using the ``-f`` or ``--functions-only`` flag on the ``deploy`` command the RDK will deploy only the necessary Lambda Functions, Lambda Execution Role, and Lambda Permissions to the account specified by the execution credentials. It accomplishes this by batching up all of the Lambda function CloudFormation snippets for the selected Rule(s) into a single dynamically generated template and deploy that CloudFormation template. One consequence of this is that subsequent deployments that specify a different set of rules for the same stack name will update that CloudFormation stack, and any Rules that were included in the first deployment but not in the second will be removed. You can use the ``--stack-name`` parameter to override the default CloudFormation stack name if you need to manage different subsets of your Lambda Functions independently. The intended usage is to deploy the functions for all of the Config rules in the Security/Compliance account, which can be done simply by using ``rdk deploy -f --all`` from your working directory. 
- -**create-rule-template command** - -This command generates a CloudFormation template that defines the AWS Config rules themselves, along with the Config Role, Config data bucket, Configuration Recorder, and Delivery channel necessary for the Config rules to work in a satellite account. You must specify the file name for the generated template using the `--output-file` or `o` command line flags. The generated template takes a single parameter of the AccountID of the central compliance account that contains the Lambda functions that will back your custom Config Rules. The generated template can be deployed in the desired satellite accounts through any of the means that you can deploy any other CloudFormation template, including the console, the CLI, as a CodePipeline task, or using StackSets. The ``create-rule-template`` command takes all of the standard arguments for selecting Rules to include in the generated template, including lists of individual Rule names, an ``--all`` flag, or using the RuleSets feature described below. - -:: - - $ rdk create-rule-template -o remote-rule-template.json --all - Generating CloudFormation template! - CloudFormation template written to remote-rule-template.json - - -Disable the supported resource types check ------------------------------------------- -It is now possible to define a resource type that is not yet supported by rdk. To disable the supported resource check use the optional flag '--skip-supported-resource-check' during the create command. - -:: - - $ rdk create MyRule --runtime python3.8 --resource-types AWS::New::ResourceType --skip-supported-resource-check - 'AWS::New::ResourceType' not found in list of accepted resource types. - Skip-Supported-Resource-Check Flag set (--skip-supported-resource-check), ignoring missing resource type error. - Running create! - Local Rule files created. - -Custom Lambda Function Name ---------------------------- -As of version 0.7.14, instead of defaulting the lambda function names to ``RDK-Rule-Function-`` it is possible to customize the name for the Lambda function to any 64 characters string as per Lambda's naming standards using the optional ``--custom-lambda-name`` flag while performing ``rdk create``. This opens up new features like : - -1. Longer config rule name. -2. Custom lambda function naming as per personal or enterprise standards. - -:: - - $ rdk create MyLongerRuleName --runtime python3.8 --resource-types AWS::EC2::Instance --custom-lambda-name custom-prefix-for-MyLongerRuleName - Running create! - Local Rule files created. - -The above example would create files with config rule name as ``MyLongerRuleName`` and lambda function with the name ``custom-prefix-for-MyLongerRuleName`` instead of ``RDK-Rule-Function-MyLongerRuleName`` - -RuleSets --------- -New as of version 0.3.11, it is possible to add RuleSet tags to rules that can be used to deploy and test groups of rules together. Rules can belong to multiple RuleSets, and RuleSet membership is stored only in the parameters.json metadata. The `deploy`, `create-rule-template`, and `test-local` commands are RuleSet-aware such that a RuleSet can be passed in as the target instead of `--all` or a specific named Rule. - -A comma-delimited list of RuleSets can be added to a Rule when you create it (using the ``--rulesets`` flag), as part of a ``modify`` command, or using new ``ruleset`` subcommands to add or remove individual rules from a RuleSet. 
- -Running ``rdk rulesets list`` will display a list of the RuleSets currently defined across all of the Rules in the working directory - -:: - - rdk-dev $ rdk rulesets list - RuleSets: AnotherRuleSet MyNewSet - -Naming a specific RuleSet will list all of the Rules that are part of that RuleSet. - -:: - - rdk-dev $ rdk rulesets list AnotherRuleSet - Rules in AnotherRuleSet : RSTest - -Rules can be added to or removed from RuleSets using the ``add`` and ``remove`` subcommands: - -:: - - rdk-dev $ rdk rulesets add MyNewSet RSTest - RSTest added to RuleSet MyNewSet - - rdk-dev $ rdk rulesets remove AnotherRuleSet RSTest - RSTest removed from RuleSet AnotherRuleSet - -RuleSets are a convenient way to maintain a single repository of Config Rules that may need to have subsets of them deployed to different environments. For example your development environment may contain some of the Rules that you run in Production but not all of them; RuleSets gives you a way to identify and selectively deploy the appropriate Rules to each environment. - -Managed Rules -------------- -The RDK is able to deploy AWS Managed Rules. - -To do so, create a rule using ``rdk create`` and provide a valid SourceIdentifier via the ``--source-identifier`` CLI option. The list of Managed Rules can be found `here `_ , and note that the Identifier can be obtained by replacing the dashes with underscores and using all capitals (for example, the "guardduty-enabled-centralized" rule has the SourceIdentifier "GUARDDUTY_ENABLED_CENTRALIZED"). Just like custom Rules you will need to specify source events and/or a maximum evaluation frequency, and also pass in any Rule parameters. The resulting Rule directory will contain only the parameters.json file, but using ``rdk deploy`` or ``rdk create-rule-template`` can be used to deploy the Managed Rule like any other Custom Rule. - -Deploying Rules Across Multiple Regions ---------------------------------------- -The RDK is able to run init/deploy/undeploy across multiple regions with a ``rdk -f -t `` - -If no region group is specified, rdk will deploy to the ``default`` region set. - -To create a sample starter region group, run ``rdk create-region-set`` to specify the filename, add the ``-o `` this will create a region set with the following tests and regions ``"default":["us-east-1","us-west-1","eu-north-1","ap-east-1"],"aws-cn-region-set":["cn-north-1","cn-northwest-1"]`` - -Using RDK to Generate a Lambda Layer in a region (Python3) ----------------------------------------------------------- -By default ``rdk init --generate-lambda-layer`` will generate an rdklib lambda layer while running init in whatever region it is run, to force re-generation of the layer, run ``rdk init --generate-lambda-layer`` again over a region - -To use this generated lambda layer, add the flag ``--generated-lambda-layer`` when running ``rdk deploy``. For example: ``rdk -f regions.yaml deploy LP3_TestRule_P39_lib --generated-lambda-layer`` - -If you created layer with a custom name (by running ``rdk init --custom-lambda-layer``, add a similar ``custom-lambda-layer`` flag when running deploy. - -Support & Feedback -================== - -This project is maintained by AWS Solution Architects and Consultants. It is not part of an AWS service and support is provided best-effort by the maintainers. -To post feedback, submit feature ideas, or report bugs, please use the `Issues section `_ of this repo. - -Contributing -============ - -email us at rdk-maintainers@amazon.com if you have any questions. 
We are happy to help and discuss. - -Contacts -======== -* **Ricky Chau** - `rickychau2780 `_ - *current maintainer* -* **Benjamin Morris** - `bmorrissirromb `_ - *current maintainer* -* **Mark Beacom** - `mbeacom `_ - *current maintainer* -* **Julio Delgado Jr** - `tekdj7 `_ - *current maintainer* - -Past Contributors -================= -* **Michael Borchert** - *Original Python version* -* **Jonathan Rault** - *Original Design, testing, feedback* -* **Greg Kim and Chris Gutierrez** - *Initial work and CI definitions* -* **Henry Huang** - *Original CFN templates and other code* -* **Santosh Kumar** - *maintainer* -* **Jose Obando** - *maintainer* -* **Jarrett Andrulis** - `jarrettandrulis `_ - *maintainer* -* **Sandeep Batchu** - `batchus `_ - *maintainer* - -License -======= - -This project is licensed under the Apache 2.0 License - -Acknowledgments -=============== - -* the boto3 team makes all of this magic possible. - - -Link -==== - -* to view example of rules built with the RDK: https://github.com/awslabs/aws-config-rules/tree/master/python From 6845ad5abc8f55e3c10074583954639fe9fc2454 Mon Sep 17 00:00:00 2001 From: Benjamin Morris <93620006+bmorrissirromb@users.noreply.github.com> Date: Wed, 21 Jun 2023 15:52:35 -0700 Subject: [PATCH 21/23] Delete testing directory --- testing/linux-python3-buildspec.yaml | 62 -------------------------- testing/windows-python3-buildspec.yaml | 59 ------------------------ 2 files changed, 121 deletions(-) delete mode 100644 testing/linux-python3-buildspec.yaml delete mode 100644 testing/windows-python3-buildspec.yaml diff --git a/testing/linux-python3-buildspec.yaml b/testing/linux-python3-buildspec.yaml deleted file mode 100644 index 74930af5..00000000 --- a/testing/linux-python3-buildspec.yaml +++ /dev/null @@ -1,62 +0,0 @@ -version: 0.1 - -phases: - install: - commands: - - echo Installing rdk... - - pip install . 
- - pip install rdklib - pre_build: - commands: - - echo Starting tests - - rm -rf LP3* - build: - commands: - - rdk create-region-set -o test-region - - rdk -f test-region.yaml init - - rdk create MFA_ENABLED_RULE --runtime python3.8 --resource-types AWS::IAM::User - - rdk -f test-region.yaml deploy MFA_ENABLED_RULE - - sleep 30 - - python3 testing/multi_region_execution_test.py - - sleep 30 - - rdk -f test-region.yaml undeploy --force MFA_ENABLED_RULE - - python3 testing/partition_test.py - - rdk init --generate-lambda-layer - - rdk create LP3_TestRule_P39_lib --runtime python3.9-lib --resource-types AWS::EC2::SecurityGroup - - rdk create LP3_TestRule_P38_lib --runtime python3.8-lib --resource-types AWS::EC2::SecurityGroup - - rdk create LP3_TestRule_P37_lib --runtime python3.7-lib --resource-types AWS::EC2::SecurityGroup - - rdk -f test-region.yaml deploy LP3_TestRule_P39_lib --generated-lambda-layer - - rdk -f test-region.yaml deploy LP3_TestRule_P38_lib --generated-lambda-layer - - rdk -f test-region.yaml deploy LP3_TestRule_P37_lib --generated-lambda-layer - - yes | rdk -f test-region.yaml undeploy LP3_TestRule_P39_lib - - yes | rdk -f test-region.yaml undeploy LP3_TestRule_P38_lib - - yes | rdk -f test-region.yaml undeploy LP3_TestRule_P37_lib - - rdk create LP3_TestRule_P39 --runtime python3.9 --resource-types AWS::EC2::SecurityGroup - - rdk create LP3_TestRule_P38 --runtime python3.8 --resource-types AWS::EC2::SecurityGroup - - rdk create LP3_TestRule_P37 --runtime python3.7 --resource-types AWS::EC2::SecurityGroup - - rdk create LP3_TestRule_P3 --runtime python3.9 --resource-types AWS::EC2::SecurityGroup - - rdk create LP3_TestRule_EFSFS --runtime python3.9 --resource-types AWS::EFS::FileSystem - - rdk create LP3_TestRule_ECSTD --runtime python3.7 --resource-types AWS::ECS::TaskDefinition - - rdk create LP3_TestRule_ECSS --runtime python3.9 --resource-types AWS::ECS::Service - - rdk modify LP3_TestRule_P3 --input-parameters '{"TestParameter":"TestValue"}' - - rdk create LP3_TestRule_P37_Periodic --runtime python3.7 --maximum-frequency One_Hour - - rdk create LP3_TestRule_P37lib_Periodic --runtime python3.7-lib --maximum-frequency One_Hour - - rdk create LP3_TestRule_P38_Periodic --runtime python3.8 --maximum-frequency One_Hour - - rdk create LP3_TestRule_P38lib_Periodic --runtime python3.8-lib --maximum-frequency One_Hour - - rdk create LP3_TestRule_P39_Periodic --runtime python3.9 --maximum-frequency One_Hour - - rdk create LP3_TestRule_P39lib_Periodic --runtime python3.9-lib --maximum-frequency One_Hour - - rdk test-local --all - - rdk deploy --all - - yes | rdk undeploy LP3_TestRule_P3 - - yes | rdk undeploy LP3_TestRule_P37 - - yes | rdk undeploy LP3_TestRule_P37_Periodic - - yes | rdk undeploy LP3_TestRule_P38 - - yes | rdk undeploy LP3_TestRule_P38_Periodic - - yes | rdk undeploy LP3_TestRule_P39 - - yes | rdk undeploy LP3_TestRule_P39_Periodic - - sleep 30 - - rdk logs LP3_TestRule_P3 - - yes | rdk undeploy -a - post_build: - commands: - - echo Build completed on `date` diff --git a/testing/windows-python3-buildspec.yaml b/testing/windows-python3-buildspec.yaml deleted file mode 100644 index 4e385fd6..00000000 --- a/testing/windows-python3-buildspec.yaml +++ /dev/null @@ -1,59 +0,0 @@ -version: 0.1 - -phases: - install: - commands: - - echo Installing rdk... - - pip install . 
- - pip install rdklib - pre_build: - commands: - - echo Starting tests - - rd -r WP3* - build: - commands: - - rdk create-region-set -o test-region - - rdk -f test-region.yaml init - - rdk create W_MFA_ENABLED_RULE --runtime python3.8 --resource-types AWS::IAM::User - - rdk -f test-region.yaml deploy W_MFA_ENABLED_RULE - - python testing/win_multi_region_execution_test.py - - rdk -f test-region.yaml undeploy --force W_MFA_ENABLED_RULE - - python testing/win_partition_test.py - - rdk init --generate-lambda-layer - - rdk create WP3_TestRule_P39_lib --runtime python3.9-lib --resource-types AWS::EC2::SecurityGroup - - rdk create WP3_TestRule_P38_lib --runtime python3.8-lib --resource-types AWS::EC2::SecurityGroup - - rdk create WP3_TestRule_P37_lib --runtime python3.7-lib --resource-types AWS::EC2::SecurityGroup - - rdk -f test-region.yaml deploy WP3_TestRule_P39_lib --generated-lambda-layer - - rdk -f test-region.yaml deploy WP3_TestRule_P38_lib --generated-lambda-layer - - rdk -f test-region.yaml deploy WP3_TestRule_P37_lib --generated-lambda-layer - - rdk -f test-region.yaml undeploy WP3_TestRule_P39_lib --force - - rdk -f test-region.yaml undeploy WP3_TestRule_P38_lib --force - - rdk -f test-region.yaml undeploy WP3_TestRule_P37_lib --force - - rdk create WP3_TestRule_P39 --runtime python3.9 --resource-types AWS::EC2::SecurityGroup - - rdk create WP3_TestRule_P38 --runtime python3.8 --resource-types AWS::EC2::SecurityGroup - - rdk create WP3_TestRule_P37 --runtime python3.7 --resource-types AWS::EC2::SecurityGroup - - rdk create WP3_TestRule_P3 --runtime python3.9 --resource-types AWS::EC2::SecurityGroup - - rdk create WP3_TestRule_EFSFS --runtime python3.9 --resource-types AWS::EFS::FileSystem - - rdk create WP3_TestRule_ECSTD --runtime python3.7 --resource-types AWS::ECS::TaskDefinition - - rdk create WP3_TestRule_ECSS --runtime python3.9 --resource-types AWS::ECS::Service - - rdk modify WP3_TestRule_P3 --runtime python3.8 - - rdk create WP3_TestRule_P37_Periodic --runtime python3.7 --maximum-frequency One_Hour - - rdk create WP3_TestRule_P37lib_Periodic --runtime python3.7-lib --maximum-frequency One_Hour - - rdk create WP3_TestRule_P38_Periodic --runtime python3.8 --maximum-frequency One_Hour - - rdk create WP3_TestRule_P38lib_Periodic --runtime python3.8-lib --maximum-frequency One_Hour - - rdk create WP3_TestRule_P39_Periodic --runtime python3.9 --maximum-frequency One_Hour - - rdk create WP3_TestRule_P39lib_Periodic --runtime python3.9-lib --maximum-frequency One_Hour - - rdk test-local --all - - rdk deploy --all - - rdk undeploy WP3_TestRule_P3 --force - - rdk undeploy WP3_TestRule_P37 --force - - rdk undeploy WP3_TestRule_P37_Periodic --force - - rdk undeploy WP3_TestRule_P38 --force - - rdk undeploy WP3_TestRule_P38_Periodic --force - - rdk undeploy WP3_TestRule_P39 --force - - rdk undeploy WP3_TestRule_P39_Periodic --force - - rdk logs WP3_TestRule_P3 - - rdk undeploy -a --force - post_build: - commands: - - echo Build completed on %date% From 70eab9184150a341a0c1919f937e2e4a89aa0936 Mon Sep 17 00:00:00 2001 From: Benjamin Morris Date: Wed, 21 Jun 2023 15:57:02 -0700 Subject: [PATCH 22/23] use new doc format --- docs/commands/clean.md | 7 + docs/commands/create-rule-template.md | 28 ++ docs/commands/create.md | 7 + docs/commands/deploy.md | 47 ++++ docs/commands/export.md | 19 ++ docs/commands/init.md | 27 ++ docs/commands/logs.md | 14 + docs/commands/modify.md | 7 + docs/commands/rulesets.md | 27 ++ docs/commands/sample-ci.md | 17 ++ docs/commands/test-local.md | 9 + 
docs/commands/undeploy.md | 10 + docs/conf.py | 179 ------------ docs/getting_started.rst | 262 ------------------ docs/index.md | 1 + docs/legacy-docs.md | 378 ++++++++++++++++++++++++++ docs/reference/test-local.rst | 10 - docs/requirements.txt | 305 ++++++++++++++++++++- 18 files changed, 897 insertions(+), 457 deletions(-) create mode 100644 docs/commands/clean.md create mode 100644 docs/commands/create-rule-template.md create mode 100644 docs/commands/create.md create mode 100644 docs/commands/deploy.md create mode 100644 docs/commands/export.md create mode 100644 docs/commands/init.md create mode 100644 docs/commands/logs.md create mode 100644 docs/commands/modify.md create mode 100644 docs/commands/rulesets.md create mode 100644 docs/commands/sample-ci.md create mode 100644 docs/commands/test-local.md create mode 100644 docs/commands/undeploy.md delete mode 100644 docs/conf.py delete mode 100644 docs/getting_started.rst create mode 100644 docs/index.md create mode 100644 docs/legacy-docs.md delete mode 100644 docs/reference/test-local.rst diff --git a/docs/commands/clean.md b/docs/commands/clean.md new file mode 100644 index 00000000..beae816b --- /dev/null +++ b/docs/commands/clean.md @@ -0,0 +1,7 @@ +# Clean + +The `clean` command is the inverse of the `init` command, and can be +used to completely remove Config resources from an account, including +the Configuration Recorder, Delivery Channel, S3 buckets, Roles, and +Permissions. This is useful for testing account provisioning automation +and for running automated tests in a clean environment. diff --git a/docs/commands/create-rule-template.md b/docs/commands/create-rule-template.md new file mode 100644 index 00000000..4a119a5c --- /dev/null +++ b/docs/commands/create-rule-template.md @@ -0,0 +1,28 @@ +# Create-Rule-Template + +Generates and saves to a file a single CloudFormation template that can +be used to deploy the specified Rule(s) into any account. This feature +has two primary uses: + +- Multi-account Config setup in which the Lambda Functions for custom rules are deployed into a centralized \"security\" or \"compliance\" account and the Config Rules themselves are deployed into \"application\" or \"satellite\" accounts. +- Combine many Config Rules into a single CloudFormation template for easier atomic deployment and management. + +The generated CloudFormation template includes a Parameter for the +AccountID that contains the Lambda functions that provide the compliance +logic for the Rules, and also exposes all of the Config Rule input +parameters as CloudFormation stack parameters. + +By default the generated CloudFormation template will set up Config as +per the settings used by the RDK `init` command, but those resources can +be omitted using the `--rules-only` flag. + +The `--config-role-arn` flag can be used for assigning existing config +role to the created Configuration Recorder. The +`-t | --tag-config-rules-script ` can now be used for output +the script generated for create tags for each config rule. + +As of version 0.6, RDK supports Config remediation. Note that in order +to use SSM documents for remediation you must supply all of the +necessary document parameters. These can be found in the SSM document +listing on the AWS console, but RDK will *not* validate at rule creation +that you have all of the necessary parameters supplied. 
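+ +As an illustrative sketch only (the ruleset name and output file below are hypothetical, and combining these flags this way is an assumption rather than a documented recipe), a rules-only template scoped to Rules tagged \"Private\" might be generated with: + +    rdk create-rule-template --rulesets Private --rules-only -o private-rules.json +    Generating CloudFormation template! +    CloudFormation template written to private-rules.json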
diff --git a/docs/commands/create.md b/docs/commands/create.md new file mode 100644 index 00000000..a9a205fc --- /dev/null +++ b/docs/commands/create.md @@ -0,0 +1,7 @@ +# Create + +As of version 0.6, RDK supports Config remediation. Note that in order +to use SSM documents for remediation you must supply all of the +necessary document parameters. These can be found in the SSM document +listing on the AWS console, but RDK will *not* validate at rule creation +that you have all of the necessary parameters supplied. diff --git a/docs/commands/deploy.md b/docs/commands/deploy.md new file mode 100644 index 00000000..43c9f8f9 --- /dev/null +++ b/docs/commands/deploy.md @@ -0,0 +1,47 @@ +# Deploy + +This command will deploy the specified Rule(s) to the Account and Region +determined by the credentials being used to execute the command, and the +value of the AWS_DEFAULT_REGION environment variable, unless those +credentials or region are overridden using the common flags. + +Once deployed, RDK will **not** explicitly start a Rule evaluation. +Depending on the changes being made to your Config Rule setup, AWS Config +may re-evaluate the deployed Rules automatically, or you can run an +evaluation using the AWS configservice CLI. + +The `--functions-only` flag can be used as part of a multi-account +deployment strategy to push **only** the Lambda functions (and +necessary Roles and Permissions) to the target account. This is intended +to be used in conjunction with the `create-rule-template` command in +order to separate the compliance logic from the evaluated accounts. For +an example of how this looks in practice, check out the [AWS +Compliance-as-Code +Engine](https://github.com/awslabs/aws-config-engine-for-compliance-as-code/). +The `--rdklib-layer-arn` flag can be used for attaching a Lambda Layer ARN +that contains the desired rdklib. Note that Lambda Layers are +region-specific. The `--lambda-role-arn` flag can be used for assigning +an existing IAM role to all Lambda functions created for Custom Config +Rules. The `--lambda-layers` flag can be used for attaching a +comma-separated list of Lambda Layer ARNs to deploy with your Lambda +function(s). The `--lambda-subnets` flag can be used for attaching a +comma-separated list of Subnets to deploy your Lambda function(s). The +`--lambda-security-groups` flag can be used for attaching a +comma-separated list of Security Groups to deploy with your Lambda +function(s). The `--custom-code-bucket` flag can be used for providing +the custom code S3 bucket name, which is not created with rdk init, for +generated CloudFormation template storage. The `--boundary-policy-arn` +flag can be used for attaching a boundary Policy ARN that will be added to +rdkLambdaRole. The `--lambda-timeout` flag can be used for specifying +the timeout associated with the Lambda function. + +Note: Behind the scenes, the `--functions-only` flag generates a +CloudFormation template and runs a \"create\" or \"update\" on the +targeted AWS Account and Region. If subsequent calls to `deploy` with +the `--functions-only` flag are made with the same stack name (either +the default or otherwise) but with *different Config rules targeted*, +any Rules deployed in previous `deploy`s but not included in the latest +`deploy` will be removed.
After a functions-only `deploy`, **only** the +Rules specifically targeted by that command (either through Rulesets or +an explicit list supplied on the command line) will be deployed in the +environment; all others will be removed. diff --git a/docs/commands/export.md b/docs/commands/export.md new file mode 100644 index 00000000..2165b48e --- /dev/null +++ b/docs/commands/export.md @@ -0,0 +1,19 @@ +# Export + +This command will export the specified Rule(s) to a Terraform file; it +supports Terraform versions 0.11 and 0.12. + +The `--format` flag can be used to specify the export format; currently it +supports only Terraform. The `--version` flag can be used to specify the +Terraform version. The `--rdklib-layer-arn` flag can be used for +attaching a Lambda Layer ARN that contains the desired rdklib. Note that +Lambda Layers are region-specific. The `--lambda-role-arn` flag can be +used for assigning an existing IAM role to all Lambda functions created for +Custom Config Rules. The `--lambda-layers` flag can be used for +attaching a comma-separated list of Lambda Layer ARNs to deploy with +your Lambda function(s). The `--lambda-subnets` flag can be used for +attaching a comma-separated list of Subnets to deploy your Lambda +function(s). The `--lambda-security-groups` flag can be used for +attaching a comma-separated list of Security Groups to deploy with your +Lambda function(s). The `--lambda-timeout` flag can be used for +specifying the timeout associated with the Lambda function. diff --git a/docs/commands/init.md b/docs/commands/init.md new file mode 100644 index 00000000..4292ebab --- /dev/null +++ b/docs/commands/init.md @@ -0,0 +1,27 @@ +# Init + +Sets up the AWS Config Service in an AWS Account. This includes: + +- Config Configuration Recorder +- Config Delivery Channel +- IAM Role for Delivery Channel +- S3 Bucket for Configuration Snapshots +- S3 Bucket for Lambda Code + +Additionally, `init` will make sure that the Configuration Recorder is +on and functioning, that the Delivery Channel has the appropriate Role +attached, and that the Delivery Channel Role has the proper permissions. + +Note: Even without Config Rules running, the Configuration Recorder is +still capturing Configuration Item snapshots and storing them in S3, so +running `init` will incur AWS charges! + +Also Note: AWS Config is a regional service, so running `init` will only +set up Config in the region currently specified in your +AWS_DEFAULT_REGION environment variable or in the `--region` flag. + +Advanced Options: + +- `--config-bucket-exists-in-another-account`: \[optional\] If the bucket being used by a Config Delivery Channel exists in another account, it is possible to skip the check that the bucket exists. This is useful when using `init` to initialize AWS Config in an account which already has a delivery channel set up with a central bucket. Currently, the rdk lists out all the buckets within the account you are running `init` from to check if the provided bucket name exists; if it doesn\'t, then it will create it. This presents an issue when a Config Delivery Channel has been configured to push configuration recordings to a central bucket. The bucket will never be found as it doesn\'t exist in the same account, but cannot be created as bucket names have to be globally unique.
+- `--skip-code-bucket-creation`: \[optional\] If you want to use a custom code bucket for rdk, enable this and pass the `--custom-code-bucket` flag to `rdk deploy`. +- `--control-tower`: \[optional\] If your account is part of an AWS Control Tower setup, `--control-tower` will skip the setup of the configuration_recorder and delivery_channel. diff --git a/docs/commands/logs.md b/docs/commands/logs.md new file mode 100644 index 00000000..6c8e2033 --- /dev/null +++ b/docs/commands/logs.md @@ -0,0 +1,14 @@ +# Logs + +The `logs` command provides a shortcut to accessing the CloudWatch Logs +output from the Lambda Functions that back your custom Config Rules. +Logs are displayed in chronological order going back the number of log +entries specified by the `--number` flag (default 3). It supports a +`--follow` flag similar to the UNIX command `tail` so that you can +choose to continually poll CloudWatch for new log items as they +are delivered by your Lambda function. + +In addition to any output that your function emits via `print()` or +`console.log()` commands, Lambda will also record log lines for the +start and stop of each Lambda invocation, including the runtime and +memory usage. diff --git a/docs/commands/modify.md b/docs/commands/modify.md new file mode 100644 index 00000000..98b0a528 --- /dev/null +++ b/docs/commands/modify.md @@ -0,0 +1,7 @@ +# Modify + +Used to modify the local metadata for Config Rules created by the RDK. +This command takes the same arguments as the `create` command (all of +them optional), and overwrites the Rule metadata for any flag specified. +Changes made using `modify` are not automatically pushed out to your AWS +Account, and must be deployed as usual using the `deploy` command. diff --git a/docs/commands/rulesets.md b/docs/commands/rulesets.md new file mode 100644 index 00000000..c4fc6743 --- /dev/null +++ b/docs/commands/rulesets.md @@ -0,0 +1,27 @@ +# Rulesets + +Rulesets provide a mechanism to tag individual Config Rules into groups +that can be acted on as a unit. Ruleset tags are single keywords, and +the commands `deploy`, `create-rule-template`, and `undeploy` can all +expand Ruleset parameters and operate on the resulting list of Rules. + +The most common use-case for Rulesets is to define standardized Account +metadata or data classifications, and then tag individual Rules with all +of the appropriate metadata tags or classification levels. + +Example: If you have Account classifications of \"Public\", \"Private\", +and \"Restricted\", you can tag all of your Rules as \"Restricted\", and +a subset of them that deal with private network security as \"Private\". +Then when you need to deploy controls to a new \"Private\" account, you +can simply use `rdk create-rule-template --rulesets Private` to generate +a CloudFormation template that includes all of the Rules necessary for +your \"Private\" classification, but omits the Rules that are only +necessary for \"Restricted\" accounts. Additionally, as your compliance +requirements change and you add Config Rules, you can tag them as +appropriate, re-generate your CloudFormation templates, and re-deploy to +make sure your Accounts are all up-to-date. + +You may also choose to classify accounts using binary attributes +(\"Prod\" vs. \"Non-Prod\" or \"PCI\" vs. \"Non-PCI\"), and then +generate account-specific CloudFormation templates using the Account +metadata to ensure that the appropriate controls are deployed.
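+ +As a brief, hedged example (the rule and ruleset names here are hypothetical), tagging an existing Rule and then generating a template for that classification might look like: + +    rdk rulesets add Private MY_NETWORK_RULE +    MY_NETWORK_RULE added to RuleSet Private + +    rdk create-rule-template --rulesets Private -o private-account-rules.json +    Generating CloudFormation template! +    CloudFormation template written to private-account-rules.json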
diff --git a/docs/commands/sample-ci.md b/docs/commands/sample-ci.md new file mode 100644 index 00000000..eaf93d81 --- /dev/null +++ b/docs/commands/sample-ci.md @@ -0,0 +1,17 @@ +# Sample-CI + +This utility command outputs a sample Configuration Item for the +specified resource type. This can be useful when writing new custom +Config Rules to help developers know what the CI structure and plausible +values for the resource type are. + +Note that you can construct Config Evaluations for any resource type +that is supported by CloudFormation, however you can not create +change-triggered Config Rules for resource types not explicitly +supported by Config, and some of the console functionality in AWS Config +may be limited. + +[CFN-supported +resources](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/cfn-supported-resources.html) +[Config-supported +resources](https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html) diff --git a/docs/commands/test-local.md b/docs/commands/test-local.md new file mode 100644 index 00000000..6ece0657 --- /dev/null +++ b/docs/commands/test-local.md @@ -0,0 +1,9 @@ +# Test-Local + +Shorthand command for running the unit tests defined for Config Rules +that use a Python runtime. When a Python 3.7+ Rule is created using the +`create` command a unit test template is created in the Rule directory. +This test boilerplate includes minimal tests, as well as a framework for +using the `unittest.mock` library for stubbing out Boto3 calls. This +allows more sophisticated test cases to be written for Periodic rules +that need to make API calls to gather information about the environment. diff --git a/docs/commands/undeploy.md b/docs/commands/undeploy.md new file mode 100644 index 00000000..83922f4e --- /dev/null +++ b/docs/commands/undeploy.md @@ -0,0 +1,10 @@ +# Undeploy + +The inverse of `deploy`, this command is used to remove a Config Rule +and its Lambda Function from the targeted account. + +This is intended to be used primarily for clean-up for testing +deployment automation (perhaps from a CI/CD pipeline) to ensure that it +works from an empty account, or to clean up a test account during +development. See also the [clean](./clean.html) command if you want to +more thoroughly scrub Config from your account. diff --git a/docs/conf.py b/docs/conf.py deleted file mode 100644 index 0328f360..00000000 --- a/docs/conf.py +++ /dev/null @@ -1,179 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file does only contain a selection of the most common options. For a -# full list see the documentation: -# http://www.sphinx-doc.org/en/master/config - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -import sys - -sys.path.insert(0, os.path.abspath(".")) -sys.path.insert(0, os.path.abspath("../")) -sys.path.insert(0, os.path.abspath("../rdk/")) - - -# -- Project information ----------------------------------------------------- - -project = "RDK" -copyright = "2017-2023 Amazon.com, Inc. or its affiliates. 
All Rights Reserved" -author = "RDK Maintainers" - -# The short X.Y version -version = "" -# The full version, including alpha/beta/rc tags -release = "1.0" - -on_rtd = os.environ.get("READTHEDOCS", None) == "True" - -if not on_rtd: # only import and set the theme if we're building docs locally, tested with sphinx-rtd-theme==0.4.3 - import sphinx_rtd_theme - - html_theme = "sphinx_rtd_theme" - html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] - -# -- General configuration --------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. -# -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.intersphinx", - "sphinx.ext.todo", - "sphinx.ext.viewcode", - "sphinx.ext.githubpages", - "sphinxarg.ext", -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# -# source_suffix = ['.rst', '.md'] -source_suffix = ".rst" - -# The master toctree document. -master_doc = "index" - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path . -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -# html_theme = 'default' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# -# html_theme_options = {} - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -html_context = { - "css_files": [ - "_static/theme_overrides.css", # override wide tables in RTD theme - ], -} - -# Custom sidebar templates, must be a dictionary that maps document names -# to template names. -# -# The default sidebars (for documents that don't match any pattern) are -# defined by theme itself. Builtin themes are using these templates by -# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', -# 'searchbox.html']``. -# -# html_sidebars = {} - - -# -- Options for HTMLHelp output --------------------------------------------- - -# Output file base name for HTML help builder. -htmlhelp_basename = "RDKdoc" - - -# -- Options for LaTeX output ------------------------------------------------ - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. 
- # - # 'preamble': '', - # Latex figure (float) alignment - # - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, "RDK.tex", "RDK Documentation", "RDK Maintainers", "manual"), -] - - -# -- Options for manual page output ------------------------------------------ - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [(master_doc, "rdk", "RDK Documentation", [author], 1)] - - -# -- Options for Texinfo output ---------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, "RDK", "RDK Documentation", author, "RDK", "One line description of project.", "Miscellaneous"), -] - - -# -- Extension configuration ------------------------------------------------- - -# -- Options for intersphinx extension --------------------------------------- - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {"https://docs.python.org/": None} - -# -- Options for todo extension ---------------------------------------------- - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True diff --git a/docs/getting_started.rst b/docs/getting_started.rst deleted file mode 100644 index 4ae573cd..00000000 --- a/docs/getting_started.rst +++ /dev/null @@ -1,262 +0,0 @@ -Getting Started -=============== - -Let's get started using the RDK! - -Prerequisites -------------- - -RDK uses python 3.7+. You will need to have an AWS account and sufficient permissions to manage the Config service, and to create and manage S3 Buckets, Roles, and Lambda Functions. An AWS IAM Policy Document that describes the minimum necessary permissions can be found `here `_ on github. - -Under the hood, rdk uses boto3 to make API calls to AWS, so you can set your credentials any way that boto3 recognizes (options 3 through 8 in the `boto docs here `_ ) or pass them in with the command-line parameters --profile, --region, --access-key-id, or --secret-access-key - -.. _permissions: http://www.python.org/ - -Installation ------------- - -If you just want to use the RDK, go ahead and install it using pip:: - -$ pip install rdk - -Alternately, if you want to see the code and/or contribute you can clone the `git repo `_ , and then from the repo directory use pip to install the package. Use the '-e' flag to generate symlinks so that any edits you make will be reflected when you run the installed package. - -If you are going to author your Lambda functions using Java you will need to have Java 8 and gradle installed. If you are going to author your Lambda functions in C# you will need to have the dotnet CLI and the .NET Core Runtime 1.08 installed. -:: - - $ pip install -e . - -To make sure the rdk is installed correctly, running the package from the command line without any arguments should display help information. - -:: - - $ rdk - usage: rdk [-h] [-p PROFILE] [-k ACCESS_KEY] [-s SECRET_ACCESS_KEY] - [-r REGION] - ... - rdk: error: the following arguments are required: , - - -Usage ------ - -Configure your env -~~~~~~~~~~~~~~~~~~ -To use the RDK, it's recommended to create a directory that will be your working directory. 
This should be committed to a source code repo, and ideally created as a python virtualenv. In that directory, run the ``init`` command to set up your AWS Config environment. - -:: - - $ rdk init - Running init! - Creating Config bucket config-bucket-780784666283 - Creating IAM role config-role - Waiting for IAM role to propagate - Config Service is ON - Config setup complete. - Creating Code bucket config-rule-code-bucket-780784666283ap-southeast-1 - -Running ``init`` subsequent times will validate your AWS Config setup and re-create any S3 buckets or IAM resources that are needed. - -Create Rules -~~~~~~~~~~~~ -In your working directory, use the ``create`` command to start creating a new custom rule. You must specify the runtime for the lambda function that will back the Rule, and you can also specify a resource type (or comma-separated list of types) that the Rule will evaluate or a maximum frequency for a periodic rule. This will add a new directory for the rule and populate it with several files, including a skeleton of your Lambda code. - -:: - - $ rdk create MyRule --runtime python3.8 --resource-types AWS::EC2::Instance --input-parameters '{"desiredInstanceType":"t2.micro"}' - Running create! - Local Rule files created. - -On Windows it is necessary to escape the double-quotes when specifying input parameters, so the `--input-parameters` argument would instead look something like this:: - - '{\"desiredInstanceType\":\"t2.micro\"}' - -Note that you can create rules that use EITHER resource-types OR maximum-frequency, but not both. We have found that rules that try to be both event-triggered as well as periodic wind up being very complicated and so we do not recommend it as a best practice. - -Edit Rules Locally -~~~~~~~~~~~~~~~~~~ -Once you have created the rule, edit the python file in your rule directory (in the above example it would be ``MyRule/MyRule.py``, but may be deeper into the rule directory tree depending on your chosen Lambda runtime) to add whatever logic your Rule requires in the ``evaluate_compliance`` function. You will have access to the CI that was sent by Config, as well as any parameters configured for the Config Rule. Your function should return either a simple compliance status (one of ``COMPLIANT``, ``NONCOMPLIANT``, or ``NOT_APPLICABLE``), or if you're using the python or node runtimes you can return a JSON object with multiple evaluation responses that the RDK will send back to AWS Config. An example would look like:: - - for sg in response['SecurityGroups']: - evaluations.append( - { - 'ComplianceResourceType': 'AWS::EC2::SecurityGroup', - 'ComplianceResourceId': sg['GroupId'], - 'ComplianceType': 'COMPLIANT', - 'Annotation': 'This is an important note.', - 'OrderingTimestamp': str(datetime.datetime.now()) - }) - - - return evaluations - -This is necessary for periodic rules that are not triggered by any CI change (which means the CI that is passed in will be null), and also for attaching annotations to your evaluation results. - -If you want to see what the JSON structure of a CI looks like for creating your logic, you can use - -:: - -$ rdk sample-ci - -to output a formatted JSON document. - -Write and Run Unit Tests -~~~~~~~~~~~~~~~~~~~~~~~~ -If you are writing Config Rules using either of the Python runtimes there will be a _test.py file deployed along with your Lambda function skeleton. 
This can be used to write unit tests according to the standard Python unittest framework (documented here: https://docs.python.org/3/library/unittest.html), which can be run using the `test-local` rdk command:: - - $ rdk test-local MyTestRule - Running local test! - Testing MyTestRule - Looking for tests in /Users/mborch/Code/rdk-dev/MyTestRule - - --------------------------------------------------------------------- - - Ran 0 tests in 0.000s - - OK - - -The test file includes setup for the MagicMock library that can be used to stub boto3 API calls if your rule logic will involve making API calls to gather additional information about your AWS environment. For some tips on how to do this, check out this blog post: https://sgillies.net/2017/10/19/mock-is-magic.html - -Modify Rule -~~~~~~~~~~~ -If you need to change the parameters of a Config rule in your working directory you can use the ``modify`` command. Any parameters you specify will overwrite existing values, any that you do not specify will not be changed. - -:: - - $ rdk modify MyRule --runtime python3.9 --maximum-frequency TwentyFour_Hours --input-parameters '{"desiredInstanceType":"t2.micro"}' - Running modify! - Modified Rule 'MyRule'. Use the `deploy` command to push your changes to AWS. - -Again, on Windows the input parameters would look like:: - - '{\"desiredInstanceType\":\"t2.micro\"}' - -It is worth noting that until you actually call the ``deploy`` command your rule only exists in your working directory, none of the Rule commands discussed thus far actually makes changes to your account. - -Deploy Rule -~~~~~~~~~~~ -Once you have completed your compliance validation code and set your Rule's configuration, you can deploy the Rule to your account using the ``deploy`` command. This will zip up your code (and the other associated code files, if any) into a deployable package (or run a gradle build if you have selected the java8 runtime or run the lambda packaging step from the dotnet CLI if you have selected the dotnetcore1.0 runtime), copy that zip file to S3, and then launch or update a CloudFormation stack that defines your Config Rule, Lambda function, and the necessary permissions and IAM Roles for it to function. Since CloudFormation does not deeply inspect Lambda code objects in S3 to construct its changeset, the ``deploy`` command will also directly update the Lambda function for any subsequent deployments to make sure code changes are propagated correctly. - -:: - - $ rdk deploy MyRule - Running deploy! - Zipping MyRule - Uploading MyRule - Creating CloudFormation Stack for MyRule - Waiting for CloudFormation stack operation to complete... - ... - Waiting for CloudFormation stack operation to complete... - Config deploy complete. - -The exact output will vary depending on Lambda runtime. You can use the --all flag to deploy all of the rules in your working directory. - -View Logs For Deployed Rule -~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Once the Rule has been deployed to AWS you can get the CloudWatch logs associated with your lambda function using the ``logs`` command. - -:: - - $ rdk logs MyRule -n 5 - 2017-11-15 22:59:33 - START RequestId: 96e7639a-ca15-11e7-95a2-b1521890638d Version: $LATEST - 2017-11-15 23:41:13 - REPORT RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda Duration: 0.50 ms Billed Duration: 100 ms Memory Size: 256 MB - Max Memory Used: 36 MB - 2017-11-15 23:41:13 - END RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda - 2017-11-15 23:41:13 - Default RDK utility class does not yet support Scheduled Notifications. 
- 2017-11-15 23:41:13 - START RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda Version: $LATEST - -You can use the ``-n`` and ``-f`` command line flags just like the UNIX ``tail`` command to view a larger number of log events and to continuously poll for new events. The latter option can be useful in conjunction with manually initiating Config Evaluations for your deploy Config Rule to make sure it is behaving as expected. - - -Advanced Features ------------------ -Cross-Account Deployments -~~~~~~~~~~~~~~~~~~~~~~~~~ -Features have been added to the RDK to facilitate the cross-account deployment pattern that enterprise customers have standardized on for custom Config Rules. A cross-account architecture is one in which the Lambda functions are deployed to a single central "Compliance" account (which may be the same as a central "Security" account), and the Config Rules are deployed to any number of "Satellite" accounts that are used by other teams or departments. This gives the compliance team confidence that their Rule logic cannot be tampered with and makes it much easier for them to modify rule logic without having to go through a complex deployment process to potentially hundreds of AWS accounts. The cross-account pattern uses two advanced RDK features - functions-only deployments and the `create-rule-template` command. - -**Function-Only Deployment** - -By using the `-f` or `--functions-only` flag on the `deploy` command the RDK will deploy only the necessary Lambda Functions, Lambda Execution Role, and Lambda Permissions to the account specified by the execution credentials. It accomplishes this by batching up all of the Lambda function CloudFormation snippets for the selected Rule(s) into a single dynamically generated template and deploy that CloudFormation template. One consequence of this is that subsequent deployments that specify a different set of Rules for the same stack name will update that CloudFormation stack, and any Rules that were included in the first deployment but not in the second will be removed. You can use the `--stack-name` parameter to override the default CloudFormation stack name if you need to manage different subsets of your Lambda Functions independently. The intended usage is to deploy the functions for all of the Config rules in the Security/Compliance account, which can be done simply by using `rdk deploy -f --all` from your working directory. - -**`create-rule-template` command** - -This command generates a CloudFormation template that defines the AWS Config rules themselves, along with the Config Role, Config data bucket, Configuration Recorder, and Delivery channel necessary for the Config rules to work in a satellite account. You must specify the file name for the generated template using the `--output-file` or `o` command line flags. The generated template takes a single parameter of the AccountID of the central compliance account that contains the Lambda functions that will back your custom Config Rules. The generated template can be deployed in the desired satellite accounts through any of the means that you can deploy any other CloudFormation template, including the console, the CLI, as a CodePipeline task, or using StackSets. The `create-rule-template` command takes all of the standard arguments for selecting Rules to include in the generated template, including lists of individual Rule names, an `--all` flag, or using the RuleSets feature described below. 
- -:: - - $ rdk create-rule-template -o remote-rule-template.json --all - Generating CloudFormation template! - CloudFormation template written to remote-rule-template.json - - -RuleSets -~~~~~~~~ -New as of version 0.3.11, it is possible to add RuleSet tags to rules that can be used to deploy and test groups of rules together. Rules can belong to multiple RuleSets, and RuleSet membership is stored only in the parameters.json metadata. The `deploy`, `create-rule-template`, and `test-local` commands are RuleSet-aware such that a RuleSet can be passed in as the target instead of `--all` or a specific named Rule. - -A comma-delimited list of RuleSets can be added to a Rule when you create it (using the `--rulesets` flag), as part of a `modify` command, or using new `ruleset` subcommands to add or remove individual rules from a RuleSet. - -Running `rdk rulesets list` will display a list of the RuleSets currently defined across all of the Rules in the working directory - -:: - - rdk-dev $ rdk rulesets list - RuleSets: AnotherRuleSet MyNewSet - -Naming a specific RuleSet will list all of the Rules that are part of that RuleSet. - -:: - - rdk-dev $ rdk rulesets list AnotherRuleSet - Rules in AnotherRuleSet : RSTest - -Rules can be added to or removed from RuleSets using the `add` and `remove` subcommands: - -:: - - rdk-dev $ rdk rulesets add MyNewSet RSTest - RSTest added to RuleSet MyNewSet - - rdk-dev $ rdk rulesets remove AnotherRuleSet RSTest - RSTest removed from RuleSet AnotherRuleSet - -RuleSets are a convenient way to maintain a single repository of Config Rules that may need to have subsets of them deployed to different environments. For example your development environment may contain some of the Rules that you run in Production but not all of them; RuleSets gives you a way to identify and selectively deploy the appropriate Rules to each environment. - - -Region Sets -~~~~~~~~~~~ -`rdk init`, `rdk deploy`, and `rdk undeploy` subcommands now support running across multiple regions in parallel using region sets defined in a yaml file. - -To run a subcommand with a region set, pass in the region set yaml file and the specific region set to run through. - -:: - - $ rdk -f regions.yaml --region-set region-set-1 undeploy CUSTOM_RULE - Deleting rules in the following regions: ['sa-east-1', 'us-east-1']. - Delete specified Rules and Lambda Functions from your AWS Account? (y/N): y - [sa-east-1] Running un-deploy! - [us-east-1] Running un-deploy! - [us-east-1] Rule removal initiated. Waiting for Stack Deletion to complete. - [sa-east-1] Rule removal initiated. Waiting for Stack Deletion to complete. - [us-east-1] CloudFormation stack operation complete. - [us-east-1] Rule removal complete, but local files have been preserved. - [us-east-1] To re-deploy, use the 'deploy' command. - [sa-east-1] CloudFormation stack operation complete. - [sa-east-1] Rule removal complete, but local files have been preserved. - [sa-east-1] To re-deploy, use the 'deploy' command. 
- -Example region set file: - -:: - - default: - - us-west-1 - - us-west-2 - region-set-1: - - sa-east-1 - - us-east-1 - region-set-2: - - ap-southeast-1 - - eu-central-1 - - sa-east-1 - - us-east-1 diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 00000000..32d46ee8 --- /dev/null +++ b/docs/index.md @@ -0,0 +1 @@ +../README.md \ No newline at end of file diff --git a/docs/legacy-docs.md b/docs/legacy-docs.md new file mode 100644 index 00000000..d7bba05c --- /dev/null +++ b/docs/legacy-docs.md @@ -0,0 +1,378 @@ +# Legacy RDK Documentation + +Please note, this documentation is a carry-over from the old RTD documentation pre-mkdocs. +This will likely be removed in a subsequent release. + +## Introduction + +Rule Development Kit - Version 2 This tool should be considered in +"Open Beta". We would greatly appreciate feedback and bug reports +either as github issues or emails to ! + +The RDK is designed to support a "Compliance-as-Code" workflow that is +intuitive and productive. It abstracts away much of the undifferentiated +heavy lifting associated with deploying AWS Config rules backed by +custom lambda functions, and provides a streamlined +develop-deploy-monitor iterative process. + +## Prerequisites + +RDK uses python 3.7+. You will need to have an AWS account and +sufficient permissions to manage the Config service, and to create and +manage S3 Buckets, Roles, and Lambda Functions. An AWS IAM Policy +Document that describes the minimum necessary permissions can be found +[here](https://github.com/awslabs/aws-config-rdk/blob/master/policy/rdk-minimum-permissions.json) +on github. + +Under the hood, rdk uses boto3 to make API calls to AWS, so you can set +your credentials any way that boto3 recognizes (options 3 through 8 in +the [boto docs +here](https://boto3.readthedocs.io/en/latest/guide/configuration.html) ) +or pass them in with the command-line parameters --profile, --region, +--access-key-id, or --secret-access-key + +## Installation + +If you just want to use the RDK, go ahead and install it using pip: + + pip install rdk + +Alternately, if you want to see the code and/or contribute you can clone +the [git repo](https://github.com/awslabs/aws-config-rdk/) , and then +from the repo directory use pip to install the package. Use the '-e' +flag to generate symlinks so that any edits you make will be reflected +when you run the installed package. + +If you are going to author your Lambda functions using Java you will +need to have Java 8 and gradle installed. If you are going to author +your Lambda functions in C# you will need to have the dotnet CLI and the +.NET Core Runtime 1.08 installed. : + + pip install -e . + +To make sure the rdk is installed correctly, running the package from +the command line without any arguments should display help information. + + rdk + usage: rdk [-h] [-p PROFILE] [-k ACCESS_KEY] [-s SECRET_ACCESS_KEY] + [-r REGION] + ... + rdk: error: the following arguments are required: , + +## Usage + +### Configure your env + +To use the RDK, it's recommended to create a directory that will be +your working directory. This should be committed to a source code repo, +and ideally created as a python virtualenv. In that directory, run the +`init` command to set up your AWS Config environment. + + rdk init + Running init! + Creating Config bucket config-bucket-780784666283 + Creating IAM role config-role + Waiting for IAM role to propagate + Config Service is ON + Config setup complete. 
+
+### Edit Rules Locally
+
+Once you have created the rule, edit the python file in your rule
+directory (in the above example it would be `MyRule/MyRule.py`, but may
+be deeper into the rule directory tree depending on your chosen Lambda
+runtime) to add whatever logic your Rule requires in the
+`evaluate_compliance` function. You will have access to the CI that was
+sent by Config, as well as any parameters configured for the Config
+Rule. Your function should return either a simple compliance status (one
+of `COMPLIANT`, `NONCOMPLIANT`, or `NOT_APPLICABLE`), or, if you're
+using the python or node runtimes, you can return a JSON object with
+multiple evaluation responses that the RDK will send back to AWS Config.
+An example would look like:
+
+    for sg in response['SecurityGroups']:
+        evaluations.append(
+            {
+                'ComplianceResourceType': 'AWS::EC2::SecurityGroup',
+                'ComplianceResourceId': sg['GroupId'],
+                'ComplianceType': 'COMPLIANT',
+                'Annotation': 'This is an important note.',
+                'OrderingTimestamp': str(datetime.datetime.now())
+            })
+
+    return evaluations
+
+This is necessary for periodic rules that are not triggered by any CI
+change (which means the CI that is passed in will be null), and also for
+attaching annotations to your evaluation results.
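+
+For the instance-type example above, a minimal `evaluate_compliance`
+using the simple single-status return style might look like the sketch
+below. This is not the generated skeleton itself, and the field lookups
+assume an `AWS::EC2::Instance` configuration item; adapt both to your
+own resource type and logic.
+
+    def evaluate_compliance(event, configuration_item, valid_rule_parameters):
+        # Periodic invocations pass in no configuration item to inspect.
+        if not configuration_item:
+            return 'NOT_APPLICABLE'
+
+        # Compare the instance type recorded by Config against the
+        # desiredInstanceType rule parameter.
+        desired_type = valid_rule_parameters.get('desiredInstanceType')
+        actual_type = configuration_item['configuration']['instanceType']
+
+        if actual_type == desired_type:
+            return 'COMPLIANT'
+        return 'NONCOMPLIANT'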
+
+If you want to see what the JSON structure of a CI looks like for
+creating your logic, you can use
+
+    rdk sample-ci
+
+to output a formatted JSON document.
+
+### Write and Run Unit Tests
+
+If you are writing Config Rules using either of the Python runtimes
+there will be a `<rulename>_test.py` file deployed along with your
+Lambda function skeleton. This can be used to write unit tests according
+to the standard Python unittest framework (documented
+[here](https://docs.python.org/3/library/unittest.html)), which can be
+run using the `test-local` rdk command:
+
+    rdk test-local MyTestRule
+    Running local test!
+    Testing MyTestRule
+    Looking for tests in /Users/mborch/Code/rdk-dev/MyTestRule
+
+    ----------------------------------------------------------------------
+
+    Ran 0 tests in 0.000s
+
+    OK
+
+The test file includes setup for the MagicMock library that can be used
+to stub boto3 API calls if your rule logic will involve making API calls
+to gather additional information about your AWS environment. For some
+tips on how to do this, check out the AWS blog posts on unit testing
+custom Config rules.
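+
+As a rough illustration only (not the generated boilerplate: the
+`MyRule` import, the `get_client` helper being patched, and the stubbed
+values are all assumptions you would adapt to your own rule), such a
+test might look like:
+
+    import unittest
+    from unittest.mock import MagicMock, patch
+
+    # Stands in for the module that `rdk create` generated for your rule;
+    # run the test from inside the rule directory so the import resolves.
+    import MyRule
+
+
+    class TestMyRule(unittest.TestCase):
+        # Patching the rule's (assumed) boto3 client helper keeps the test
+        # from making real AWS calls if the compliance logic queries an API.
+        @patch.object(MyRule, 'get_client', return_value=MagicMock())
+        def test_wrong_instance_type_is_noncompliant(self, mock_get_client):
+            ci = {'configuration': {'instanceType': 'm5.large'}}
+            params = {'desiredInstanceType': 't2.micro'}
+            result = MyRule.evaluate_compliance({}, ci, params)
+            self.assertEqual(result, 'NONCOMPLIANT')
+
+
+    if __name__ == '__main__':
+        unittest.main()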
+
+### Modify Rule
+
+If you need to change the parameters of a Config rule in your working
+directory you can use the `modify` command. Any parameters you specify
+will overwrite existing values; any that you do not specify will not be
+changed.
+
+    rdk modify MyRule --runtime python3.9 --maximum-frequency TwentyFour_Hours --input-parameters '{"desiredInstanceType":"t2.micro"}'
+    Running modify!
+    Modified Rule 'MyRule'. Use the `deploy` command to push your changes to AWS.
+
+Again, on Windows the input parameters would look like:
+
+    '{\"desiredInstanceType\":\"t2.micro\"}'
+
+It is worth noting that until you actually call the `deploy` command
+your rule only exists in your working directory; none of the Rule
+commands discussed thus far actually make changes to your account.
+
+### Deploy Rule
+
+Once you have completed your compliance validation code and set your
+Rule's configuration, you can deploy the Rule to your account using the
+`deploy` command. This will zip up your code (and the other associated
+code files, if any) into a deployable package (or run a gradle build if
+you have selected the java8 runtime, or run the lambda packaging step
+from the dotnet CLI if you have selected the dotnetcore1.0 runtime),
+copy that zip file to S3, and then launch or update a CloudFormation
+stack that defines your Config Rule, Lambda function, and the necessary
+permissions and IAM Roles for it to function. Since CloudFormation does
+not deeply inspect Lambda code objects in S3 to construct its changeset,
+the `deploy` command will also directly update the Lambda function for
+any subsequent deployments to make sure code changes are propagated
+correctly.
+
+    rdk deploy MyRule
+    Running deploy!
+    Zipping MyRule
+    Uploading MyRule
+    Creating CloudFormation Stack for MyRule
+    Waiting for CloudFormation stack operation to complete...
+    ...
+    Waiting for CloudFormation stack operation to complete...
+    Config deploy complete.
+
+The exact output will vary depending on the Lambda runtime. You can use
+the --all flag to deploy all of the rules in your working directory.
+
+### View Logs For Deployed Rule
+
+Once the Rule has been deployed to AWS you can get the CloudWatch logs
+associated with your lambda function using the `logs` command.
+
+    rdk logs MyRule -n 5
+    2017-11-15 22:59:33 - START RequestId: 96e7639a-ca15-11e7-95a2-b1521890638d Version: $LATEST
+    2017-11-15 23:41:13 - REPORT RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda Duration: 0.50 ms Billed Duration: 100 ms Memory Size: 256 MB
+    Max Memory Used: 36 MB
+    2017-11-15 23:41:13 - END RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda
+    2017-11-15 23:41:13 - Default RDK utility class does not yet support Scheduled Notifications.
+    2017-11-15 23:41:13 - START RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda Version: $LATEST
+
+You can use the `-n` and `-f` command line flags just like the UNIX
+`tail` command to view a larger number of log events and to continuously
+poll for new events. The latter option can be useful in conjunction with
+manually initiating Config Evaluations for your deployed Config Rule to
+make sure it is behaving as expected.
+
+## Advanced Features
+
+### Cross-Account Deployments
+
+Features have been added to the RDK to facilitate the cross-account
+deployment pattern that enterprise customers have standardized on for
+custom Config Rules. A cross-account architecture is one in which the
+Lambda functions are deployed to a single central "Compliance" account
+(which may be the same as a central "Security" account), and the
+Config Rules are deployed to any number of "Satellite" accounts that
+are used by other teams or departments. This gives the compliance team
+confidence that their Rule logic cannot be tampered with and makes it
+much easier for them to modify rule logic without having to go through a
+complex deployment process to potentially hundreds of AWS accounts. The
+cross-account pattern uses two advanced RDK features: functions-only
+deployments and the create-rule-template command.
+
+**Function-Only Deployment**
+
+By using the -f or --functions-only flag on
+the deploy command, the RDK will deploy only the necessary
+Lambda Functions, Lambda Execution Role, and Lambda Permissions to the
+account specified by the execution credentials. It accomplishes this by
+batching up all of the Lambda function CloudFormation snippets for the
+selected Rule(s) into a single dynamically generated template and
+deploying that CloudFormation template. One consequence of this is that
+subsequent deployments that specify a different set of Rules for the
+same stack name will update that CloudFormation stack, and any Rules
+that were included in the first deployment but not in the second will be
+removed. You can use the --stack-name parameter to override the
+default CloudFormation stack name if you need to manage different
+subsets of your Lambda Functions independently. The intended usage is to
+deploy the functions for all of the Config rules in the
+Security/Compliance account, which can be done simply by using rdk
+deploy -f --all from your working directory.
+
+**`create-rule-template` command**
+
+This command generates a CloudFormation template that defines the AWS
+Config rules themselves, along with the Config Role, Config data bucket,
+Configuration Recorder, and Delivery channel necessary for the Config
+rules to work in a satellite account. You must specify the file name for
+the generated template using the --output-file or
+-o command line flags. The generated template takes a
+single parameter of the AccountID of the central compliance account that
+contains the Lambda functions that will back your custom Config Rules.
+The generated template can be deployed in the desired satellite accounts
+through any of the means that you can deploy any other CloudFormation
+template, including the console, the CLI, as a CodePipeline task, or
+using StackSets. The create-rule-template command takes
+all of the standard arguments for selecting Rules to include in the
+generated template, including lists of individual Rule names, an
+--all flag, or using the RuleSets feature described
+below.
+
+    rdk create-rule-template -o remote-rule-template.json --all
+    Generating CloudFormation template!
+    CloudFormation template written to remote-rule-template.json
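+
+As a rough sketch of the CLI route in a satellite account (the stack
+name is arbitrary, and the parameter key shown here is a placeholder;
+check the generated template for the actual name of its AccountID
+parameter):
+
+    aws cloudformation deploy \
+      --template-file remote-rule-template.json \
+      --stack-name custom-config-rules \
+      --capabilities CAPABILITY_IAM \
+      --parameter-overrides LambdaAccountId=111122223333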
+
+### RuleSets
+
+New as of version 0.3.11, it is possible to add RuleSet tags to rules
+that can be used to deploy and test groups of rules together. Rules can
+belong to multiple RuleSets, and RuleSet membership is stored only in
+the parameters.json metadata. The deploy,
+create-rule-template, and test-local
+commands are RuleSet-aware such that a RuleSet can be passed in as the
+target instead of --all or a specific named Rule.
+
+A comma-delimited list of RuleSets can be added to a Rule when you
+create it (using the --rulesets flag), as part of a
+modify command, or using new ruleset
+subcommands to add or remove individual rules from a RuleSet.
+
+Running rdk rulesets list will display a list of the
+RuleSets currently defined across all of the Rules in the working
+directory.
+
+    rdk rulesets list
+    RuleSets: AnotherRuleSet MyNewSet
+
+Naming a specific RuleSet will list all of the Rules that are part of
+that RuleSet.
+
+    rdk rulesets list AnotherRuleSet
+    Rules in AnotherRuleSet : RSTest
+
+Rules can be added to or removed from RuleSets using the
+add and remove subcommands:
+
+    rdk rulesets add MyNewSet RSTest
+    RSTest added to RuleSet MyNewSet
+
+    rdk rulesets remove AnotherRuleSet RSTest
+    RSTest removed from RuleSet AnotherRuleSet
+
+RuleSets are a convenient way to maintain a single repository of Config
+Rules that may need to have subsets of them deployed to different
+environments. For example, your development environment may contain some
+of the Rules that you run in Production but not all of them; RuleSets
+give you a way to identify and selectively deploy the appropriate Rules
+to each environment.
+
+### Region Sets
+
+The rdk init, rdk deploy, and rdk
+undeploy subcommands now support running across multiple
+regions in parallel using region sets defined in a yaml file.
+
+To run a subcommand with a region set, pass in the region set yaml file
+and the specific region set to run through.
+
+    rdk -f regions.yaml --region-set region-set-1 undeploy CUSTOM_RULE
+    Deleting rules in the following regions: ['sa-east-1', 'us-east-1'].
+    Delete specified Rules and Lambda Functions from your AWS Account? (y/N): y
+    [sa-east-1] Running un-deploy!
+    [us-east-1] Running un-deploy!
+    [us-east-1] Rule removal initiated. Waiting for Stack Deletion to complete.
+    [sa-east-1] Rule removal initiated. Waiting for Stack Deletion to complete.
+    [us-east-1] CloudFormation stack operation complete.
+    [us-east-1] Rule removal complete, but local files have been preserved.
+    [us-east-1] To re-deploy, use the 'deploy' command.
+    [sa-east-1] CloudFormation stack operation complete.
+    [sa-east-1] Rule removal complete, but local files have been preserved.
+    [sa-east-1] To re-deploy, use the 'deploy' command.
+
+Example region set file:
+
+    default:
+      - us-west-1
+      - us-west-2
+    region-set-1:
+      - sa-east-1
+      - us-east-1
+    region-set-2:
+      - ap-southeast-1
+      - eu-central-1
+      - sa-east-1
+      - us-east-1
diff --git a/docs/reference/test-local.rst b/docs/reference/test-local.rst
deleted file mode 100644
index 63476a9e..00000000
--- a/docs/reference/test-local.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-Test-Local
-----------
-
-.. argparse::
-   :module: rdk
-   :func: get_test_local_parser
-   :prog: rdk test-local
-   :nodescription:
-
-   Shorthand command for running the unit tests defined for Config Rules that use a Python runtime. When a Python 3.7+ Rule is created using the ``create`` command a unit test template is created in the Rule directory. This test boilerplate includes minimal tests, as well as a framework for using the ``unittest.mock`` library for stubbing out Boto3 calls.
This allows more sophisticated test cases to be written for Periodic rules that need to make API calls to gather information about the environment. diff --git a/docs/requirements.txt b/docs/requirements.txt index 34fcb7dd..c1560e74 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,6 +1,299 @@ -Sphinx==1.7.8 -sphinx-argparse==0.2.5 -sphinx-rtd-theme==0.4.3 -sphinxcontrib-websupport==1.1.0 -PyYAML==5.4.1 -jinja2<3.1.0 +cached-property==1.5.2 ; python_full_version >= "3.7.2" and python_version < "3.8" \ + --hash=sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130 \ + --hash=sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0 +certifi==2022.12.7 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 +charset-normalizer==2.1.1 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ + --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f +click==8.1.3 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \ + --hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48 +colorama==0.4.6 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 +ghp-import==2.1.0 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619 \ + --hash=sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343 +griffe==0.28.2 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:a471498b0b9505c721ea0e652fd77c97df1aeb56c4eb8c93d24bb1140da4216d \ + --hash=sha256:bde3a3dfa301a4b113c7fac3b2be45e5723bc50cda4c9cfe13f43c447c9aa5d1 +idna==3.4 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 +importlib-metadata==4.13.0 ; python_full_version >= "3.7.2" and python_version < "3.10" \ + --hash=sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116 \ + --hash=sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d +jinja2==3.1.2 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ + --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 +markdown-include==0.8.1 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:1d0623e0fc2757c38d35df53752768356162284259d259c486b4ab6285cdbbe3 \ + --hash=sha256:32f0635b9cfef46997b307e2430022852529f7a5b87c0075c504283e7cc7db53 +markdown==3.3.7 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874 \ + --hash=sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621 +markupsafe==2.1.2 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + 
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \ + --hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \ + --hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \ + --hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \ + --hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \ + --hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \ + --hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \ + --hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \ + --hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \ + --hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \ + --hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \ + --hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \ + --hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \ + --hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \ + --hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \ + --hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \ + --hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \ + --hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \ + --hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \ + --hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \ + --hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \ + --hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \ + --hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \ + --hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \ + --hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \ + --hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \ + --hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \ + --hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \ + --hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \ + --hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \ + --hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \ + --hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \ + --hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \ + --hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \ + --hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \ + --hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \ + --hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \ + --hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \ + --hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \ + --hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \ + --hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \ + --hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \ + 
--hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \ + --hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \ + --hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \ + --hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \ + --hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \ + --hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \ + --hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \ + --hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58 +mergedeep==1.3.4 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8 \ + --hash=sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307 +mkdocs-autorefs==0.4.1 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:70748a7bd025f9ecd6d6feeba8ba63f8e891a1af55f48e366d6d6e78493aba84 \ + --hash=sha256:a2248a9501b29dc0cc8ba4c09f4f47ff121945f6ce33d760f145d6f89d313f5b +mkdocs-material-extensions==1.1.1 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:9c003da71e2cc2493d910237448c672e00cefc800d3d6ae93d2fc69979e3bd93 \ + --hash=sha256:e41d9f38e4798b6617ad98ca8f7f1157b1e4385ac1459ca1e4ea219b556df945 +mkdocs-material==9.1.14 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:1ae74cc5464ef2f64574d4884512efed7f4db386fb9bc6af20fd427d7a702f49 \ + --hash=sha256:b56a9f955ed32d38333715cbbf68ce38f683bf38610c65094fa4ef2db9f08bcd +mkdocs==1.4.3 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:5955093bbd4dd2e9403c5afaf57324ad8b04f16886512a3ee6ef828956481c57 \ + --hash=sha256:6ee46d309bda331aac915cd24aab882c179a933bd9e77b80ce7d2eaaa3f689dd +mkdocstrings-python==1.0.0 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:b89d849df990204f909d5452548b6936a185f912da06208a93909bebe25d6e67 \ + --hash=sha256:c59d67009a7a85172f4da990d8523e95606b6a1ff93a22a2351ad3b5f8cafed1 +mkdocstrings==0.21.2 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:304e56a2e90595708a38a13a278e538a67ad82052dd5c8b71f77a604a4f3d911 \ + --hash=sha256:949ef8da92df9d692ca07be50616459a6b536083a25520fd54b00e8814ce019b +packaging==21.3 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ + --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 +pygments==2.15.1 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c \ + --hash=sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1 +pymdown-extensions==10.0.1 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:ae66d84013c5d027ce055693e09a4628b67e9dec5bce05727e45b0918e36f274 \ + --hash=sha256:b44e1093a43b8a975eae17b03c3a77aad4681b3b56fce60ce746dbef1944c8cb +pyparsing==3.0.9 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ + --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc +python-dateutil==2.8.2 ; python_full_version >= "3.7.2" and python_full_version < 
"4.0.0" \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 +pyyaml-env-tag==0.1 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb \ + --hash=sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069 +pyyaml==6.0 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \ + --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \ + --hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \ + --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \ + --hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \ + --hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \ + --hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \ + --hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \ + --hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \ + --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \ + --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \ + --hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \ + --hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \ + --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \ + --hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \ + --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \ + --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \ + --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \ + --hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \ + --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \ + --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \ + --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \ + --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \ + --hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \ + --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \ + --hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \ + --hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \ + --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \ + --hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \ + --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \ + --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \ + --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \ + --hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \ + --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \ + --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \ + --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \ + 
--hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \ + --hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \ + --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \ + --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5 +regex==2023.5.5 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:02f4541550459c08fdd6f97aa4e24c6f1932eec780d58a2faa2068253df7d6ff \ + --hash=sha256:0a69cf0c00c4d4a929c6c7717fd918414cab0d6132a49a6d8fc3ded1988ed2ea \ + --hash=sha256:0bbd5dcb19603ab8d2781fac60114fb89aee8494f4505ae7ad141a3314abb1f9 \ + --hash=sha256:10250a093741ec7bf74bcd2039e697f519b028518f605ff2aa7ac1e9c9f97423 \ + --hash=sha256:10374c84ee58c44575b667310d5bbfa89fb2e64e52349720a0182c0017512f6c \ + --hash=sha256:1189fbbb21e2c117fda5303653b61905aeeeea23de4a94d400b0487eb16d2d60 \ + --hash=sha256:1307aa4daa1cbb23823d8238e1f61292fd07e4e5d8d38a6efff00b67a7cdb764 \ + --hash=sha256:144b5b017646b5a9392a5554a1e5db0000ae637be4971c9747566775fc96e1b2 \ + --hash=sha256:171c52e320fe29260da550d81c6b99f6f8402450dc7777ef5ced2e848f3b6f8f \ + --hash=sha256:18196c16a584619c7c1d843497c069955d7629ad4a3fdee240eb347f4a2c9dbe \ + --hash=sha256:18f05d14f14a812fe9723f13afafefe6b74ca042d99f8884e62dbd34dcccf3e2 \ + --hash=sha256:1ecf3dcff71f0c0fe3e555201cbe749fa66aae8d18f80d2cc4de8e66df37390a \ + --hash=sha256:21e90a288e6ba4bf44c25c6a946cb9b0f00b73044d74308b5e0afd190338297c \ + --hash=sha256:23d86ad2121b3c4fc78c58f95e19173790e22ac05996df69b84e12da5816cb17 \ + --hash=sha256:256f7f4c6ba145f62f7a441a003c94b8b1af78cee2cccacfc1e835f93bc09426 \ + --hash=sha256:290fd35219486dfbc00b0de72f455ecdd63e59b528991a6aec9fdfc0ce85672e \ + --hash=sha256:2e9c4f778514a560a9c9aa8e5538bee759b55f6c1dcd35613ad72523fd9175b8 \ + --hash=sha256:338994d3d4ca4cf12f09822e025731a5bdd3a37aaa571fa52659e85ca793fb67 \ + --hash=sha256:33d430a23b661629661f1fe8395be2004006bc792bb9fc7c53911d661b69dd7e \ + --hash=sha256:385992d5ecf1a93cb85adff2f73e0402dd9ac29b71b7006d342cc920816e6f32 \ + --hash=sha256:3d45864693351c15531f7e76f545ec35000d50848daa833cead96edae1665559 \ + --hash=sha256:40005cbd383438aecf715a7b47fe1e3dcbc889a36461ed416bdec07e0ef1db66 \ + --hash=sha256:4035d6945cb961c90c3e1c1ca2feb526175bcfed44dfb1cc77db4fdced060d3e \ + --hash=sha256:445d6f4fc3bd9fc2bf0416164454f90acab8858cd5a041403d7a11e3356980e8 \ + --hash=sha256:48c9ec56579d4ba1c88f42302194b8ae2350265cb60c64b7b9a88dcb7fbde309 \ + --hash=sha256:4a5059bd585e9e9504ef9c07e4bc15b0a621ba20504388875d66b8b30a5c4d18 \ + --hash=sha256:4a6e4b0e0531223f53bad07ddf733af490ba2b8367f62342b92b39b29f72735a \ + --hash=sha256:4b870b6f632fc74941cadc2a0f3064ed8409e6f8ee226cdfd2a85ae50473aa94 \ + --hash=sha256:50fd2d9b36938d4dcecbd684777dd12a407add4f9f934f235c66372e630772b0 \ + --hash=sha256:53e22e4460f0245b468ee645156a4f84d0fc35a12d9ba79bd7d79bdcd2f9629d \ + --hash=sha256:586a011f77f8a2da4b888774174cd266e69e917a67ba072c7fc0e91878178a80 \ + --hash=sha256:59597cd6315d3439ed4b074febe84a439c33928dd34396941b4d377692eca810 \ + --hash=sha256:59e4b729eae1a0919f9e4c0fc635fbcc9db59c74ad98d684f4877be3d2607dd6 \ + --hash=sha256:5a0f874ee8c0bc820e649c900243c6d1e6dc435b81da1492046716f14f1a2a96 \ + --hash=sha256:5ac2b7d341dc1bd102be849d6dd33b09701223a851105b2754339e390be0627a \ + --hash=sha256:5e3f4468b8c6fd2fd33c218bbd0a1559e6a6fcf185af8bb0cc43f3b5bfb7d636 \ + --hash=sha256:6164d4e2a82f9ebd7752a06bd6c504791bedc6418c0196cd0a23afb7f3e12b2d \ + 
--hash=sha256:6893544e06bae009916a5658ce7207e26ed17385149f35a3125f5259951f1bbe \ + --hash=sha256:690a17db524ee6ac4a27efc5406530dd90e7a7a69d8360235323d0e5dafb8f5b \ + --hash=sha256:6b8d0c153f07a953636b9cdb3011b733cadd4178123ef728ccc4d5969e67f3c2 \ + --hash=sha256:72a28979cc667e5f82ef433db009184e7ac277844eea0f7f4d254b789517941d \ + --hash=sha256:72aa4746993a28c841e05889f3f1b1e5d14df8d3daa157d6001a34c98102b393 \ + --hash=sha256:732176f5427e72fa2325b05c58ad0b45af341c459910d766f814b0584ac1f9ac \ + --hash=sha256:7918a1b83dd70dc04ab5ed24c78ae833ae8ea228cef84e08597c408286edc926 \ + --hash=sha256:7923470d6056a9590247ff729c05e8e0f06bbd4efa6569c916943cb2d9b68b91 \ + --hash=sha256:7d76a8a1fc9da08296462a18f16620ba73bcbf5909e42383b253ef34d9d5141e \ + --hash=sha256:811040d7f3dd9c55eb0d8b00b5dcb7fd9ae1761c454f444fd9f37fe5ec57143a \ + --hash=sha256:821a88b878b6589c5068f4cc2cfeb2c64e343a196bc9d7ac68ea8c2a776acd46 \ + --hash=sha256:84397d3f750d153ebd7f958efaa92b45fea170200e2df5e0e1fd4d85b7e3f58a \ + --hash=sha256:844671c9c1150fcdac46d43198364034b961bd520f2c4fdaabfc7c7d7138a2dd \ + --hash=sha256:890a09cb0a62198bff92eda98b2b507305dd3abf974778bae3287f98b48907d3 \ + --hash=sha256:8f08276466fedb9e36e5193a96cb944928301152879ec20c2d723d1031cd4ddd \ + --hash=sha256:8f5e06df94fff8c4c85f98c6487f6636848e1dc85ce17ab7d1931df4a081f657 \ + --hash=sha256:921473a93bcea4d00295799ab929522fc650e85c6b9f27ae1e6bb32a790ea7d3 \ + --hash=sha256:941b3f1b2392f0bcd6abf1bc7a322787d6db4e7457be6d1ffd3a693426a755f2 \ + --hash=sha256:9b320677521aabf666cdd6e99baee4fb5ac3996349c3b7f8e7c4eee1c00dfe3a \ + --hash=sha256:9c3efee9bb53cbe7b285760c81f28ac80dc15fa48b5fe7e58b52752e642553f1 \ + --hash=sha256:9fda3e50abad8d0f48df621cf75adc73c63f7243cbe0e3b2171392b445401550 \ + --hash=sha256:a4c5da39bca4f7979eefcbb36efea04471cd68db2d38fcbb4ee2c6d440699833 \ + --hash=sha256:a56c18f21ac98209da9c54ae3ebb3b6f6e772038681d6cb43b8d53da3b09ee81 \ + --hash=sha256:a623564d810e7a953ff1357f7799c14bc9beeab699aacc8b7ab7822da1e952b8 \ + --hash=sha256:a8906669b03c63266b6a7693d1f487b02647beb12adea20f8840c1a087e2dfb5 \ + --hash=sha256:a99757ad7fe5c8a2bb44829fc57ced11253e10f462233c1255fe03888e06bc19 \ + --hash=sha256:aa7d032c1d84726aa9edeb6accf079b4caa87151ca9fabacef31fa028186c66d \ + --hash=sha256:aad5524c2aedaf9aa14ef1bc9327f8abd915699dea457d339bebbe2f0d218f86 \ + --hash=sha256:afb1c70ec1e594a547f38ad6bf5e3d60304ce7539e677c1429eebab115bce56e \ + --hash=sha256:b6365703e8cf1644b82104cdd05270d1a9f043119a168d66c55684b1b557d008 \ + --hash=sha256:b8b942d8b3ce765dbc3b1dad0a944712a89b5de290ce8f72681e22b3c55f3cc8 \ + --hash=sha256:ba73a14e9c8f9ac409863543cde3290dba39098fc261f717dc337ea72d3ebad2 \ + --hash=sha256:bd7b68fd2e79d59d86dcbc1ccd6e2ca09c505343445daaa4e07f43c8a9cc34da \ + --hash=sha256:bd966475e963122ee0a7118ec9024388c602d12ac72860f6eea119a3928be053 \ + --hash=sha256:c2ce65bdeaf0a386bb3b533a28de3994e8e13b464ac15e1e67e4603dd88787fa \ + --hash=sha256:c64d5abe91a3dfe5ff250c6bb267ef00dbc01501518225b45a5f9def458f31fb \ + --hash=sha256:c8c143a65ce3ca42e54d8e6fcaf465b6b672ed1c6c90022794a802fb93105d22 \ + --hash=sha256:cd46f30e758629c3ee91713529cfbe107ac50d27110fdcc326a42ce2acf4dafc \ + --hash=sha256:ced02e3bd55e16e89c08bbc8128cff0884d96e7f7a5633d3dc366b6d95fcd1d6 \ + --hash=sha256:cf123225945aa58b3057d0fba67e8061c62d14cc8a4202630f8057df70189051 \ + --hash=sha256:d19e57f888b00cd04fc38f5e18d0efbd91ccba2d45039453ab2236e6eec48d4d \ + --hash=sha256:d1cbe6b5be3b9b698d8cc4ee4dee7e017ad655e83361cd0ea8e653d65e469468 \ + 
--hash=sha256:db09e6c18977a33fea26fe67b7a842f706c67cf8bda1450974d0ae0dd63570df \ + --hash=sha256:de2f780c3242ea114dd01f84848655356af4dd561501896c751d7b885ea6d3a1 \ + --hash=sha256:e2205a81f815b5bb17e46e74cc946c575b484e5f0acfcb805fb252d67e22938d \ + --hash=sha256:e645c757183ee0e13f0bbe56508598e2d9cd42b8abc6c0599d53b0d0b8dd1479 \ + --hash=sha256:f2910502f718828cecc8beff004917dcf577fc5f8f5dd40ffb1ea7612124547b \ + --hash=sha256:f764e4dfafa288e2eba21231f455d209f4709436baeebb05bdecfb5d8ddc3d35 \ + --hash=sha256:f83fe9e10f9d0b6cf580564d4d23845b9d692e4c91bd8be57733958e4c602956 \ + --hash=sha256:fb2b495dd94b02de8215625948132cc2ea360ae84fe6634cd19b6567709c8ae2 \ + --hash=sha256:fee0016cc35a8a91e8cc9312ab26a6fe638d484131a7afa79e1ce6165328a135 +requests==2.31.0 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 +six==1.16.0 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 +typing-extensions==4.4.0 ; python_full_version >= "3.7.2" and python_version < "3.10" \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e +urllib3==1.26.13 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc \ + --hash=sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8 +watchdog==3.0.0 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a \ + --hash=sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100 \ + --hash=sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8 \ + --hash=sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc \ + --hash=sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae \ + --hash=sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41 \ + --hash=sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0 \ + --hash=sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f \ + --hash=sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c \ + --hash=sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9 \ + --hash=sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3 \ + --hash=sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709 \ + --hash=sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83 \ + --hash=sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759 \ + --hash=sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9 \ + --hash=sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3 \ + --hash=sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7 \ + --hash=sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f \ + --hash=sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346 \ + --hash=sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674 \ + 
--hash=sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397 \ + --hash=sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96 \ + --hash=sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d \ + --hash=sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a \ + --hash=sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64 \ + --hash=sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44 \ + --hash=sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33 +zipp==3.11.0 ; python_full_version >= "3.7.2" and python_version < "3.10" \ + --hash=sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa \ + --hash=sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766 From 5d42db21779bf2830c71bfd23a4f3a24b53c07d8 Mon Sep 17 00:00:00 2001 From: Benjamin Morris <93620006+bmorrissirromb@users.noreply.github.com> Date: Wed, 21 Jun 2023 16:02:44 -0700 Subject: [PATCH 23/23] Update README.md with todo --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index dd2bec3c..d755f1c7 100644 --- a/README.md +++ b/README.md @@ -118,3 +118,5 @@ rdk>=1,<2 - Makefile can be replaced by poetry's poethepoet taskrunner - Makefiles are misused! - Look to eks-cluster-upgrade for example + +- Review any other files that should be kept/removed, such as examples, workshops, and guidelines like minimum required permissions.