diff --git a/README.md b/README.md index 519bf256..7cae430d 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,14 @@ -# rdk +# AWS RDK [![pypibadge](https://static.pepy.tech/personalized-badge/rdk?period=total&units=international_system&left_color=black&right_color=blue&left_text=downloads)](https://pepy.tech/project/rdk) ![PyPI](https://img.shields.io/pypi/v/rdk) -Rule Development Kit +AWS Config Rules Development Kit We greatly appreciate feedback and bug reports at ! You may also create an issue on this repo. -The RDK is designed to support a \"Compliance-as-Code\" workflow that is +The RDK is designed to support a "Compliance-as-Code" workflow that is intuitive and productive. It abstracts away much of the undifferentiated heavy lifting associated with deploying AWS Config rules backed by custom lambda functions, and provides a streamlined @@ -17,7 +17,7 @@ develop-deploy-monitor iterative process. For complete documentation, including command reference, check out the [ReadTheDocs documentation](https://rdk.readthedocs.io/en/latest/). -# Getting Started +## Getting Started Uses python 3.7/3.8/3.9 and is installed via pip. Requires you to have an AWS account and sufficient permissions to manage the Config service, @@ -33,7 +33,9 @@ or pass them in with the command-line parameters `--profile`, If you just want to use the RDK, go ahead and install it using pip. - $ pip install rdk +```bash +pip install rdk +``` Alternately, if you want to see the code and/or contribute you can clone the git repo, and then from the repo directory use pip to install the @@ -45,67 +47,73 @@ need to have Java 8 and gradle installed. If you are going to author your Lambda functions in C# you will need to have the dotnet CLI and the .NET Core Runtime 1.08 installed. - $ pip install -e . +```bash +pip install -e . +``` To make sure the rdk is installed correctly, running the package from the command line without any arguments should display help information. - $ rdk - usage: rdk [-h] [-p PROFILE] [-k ACCESS_KEY_ID] [-s SECRET_ACCESS_KEY] - [-r REGION] [-f REGION_FILE] [--region-set REGION_SET] - [-v] ... - rdk: error: the following arguments are required: , +```bash +rdk +usage: rdk [-h] [-p PROFILE] [-k ACCESS_KEY_ID] [-s SECRET_ACCESS_KEY] + [-r REGION] [-f REGION_FILE] [--region-set REGION_SET] + [-v] ... +rdk: error: the following arguments are required: , +``` -# Usage +## Usage -## Configure your env +### Configure your env -To use the RDK, it\'s recommended to create a directory that will be +To use the RDK, it's recommended to create a directory that will be your working directory. This should be committed to a source code repo, and ideally created as a python virtualenv. In that directory, run the `init` command to set up your AWS Config environment. - $ rdk init - Running init! - Creating Config bucket config-bucket-780784666283 - Creating IAM role config-role - Waiting for IAM role to propagate - Config Service is ON - Config setup complete. - Creating Code bucket config-rule-code-bucket-780784666283ap-southeast-1 +```bash +rdk init +Running init! +Creating Config bucket config-bucket-780784666283 +Creating IAM role config-role +Waiting for IAM role to propagate +Config Service is ON +Config setup complete. +Creating Code bucket config-rule-code-bucket-780784666283ap-southeast-1 +``` Running `init` subsequent times will validate your AWS Config setup and re-create any S3 buckets or IAM resources that are needed. 
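If you want to double-check what `init` configured, the AWS CLI can confirm the recorder and delivery channel state; a quick sanity check, assuming the AWS CLI is installed and pointed at the same account and region:

```bash
# Verify that the Configuration Recorder exists and is recording
aws configservice describe-configuration-recorder-status

# Verify the Delivery Channel created or validated by `rdk init`
aws configservice describe-delivery-channels
```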
-- If you have config delivery bucket already present in some other AWS - account then use `--config-bucket-exists-in-another-account` as - argument. +- If you have a config delivery bucket already present in another AWS account, use the `--config-bucket-exists-in-another-account` argument. - $ rdk init --config-bucket-exists-in-another-account +```bash +rdk init --config-bucket-exists-in-another-account +``` -- If you have AWS Organizations/ControlTower Setup in your AWS - environment then additionally, use `--control-tower` as argument. +- If you have an AWS Organizations/Control Tower setup in your AWS environment, additionally use the `--control-tower` argument. - $ rdk init --control-tower --config-bucket-exists-in-another-account +```bash +rdk init --control-tower --config-bucket-exists-in-another-account +``` -- If bucket for custom lambda code is already present in current - account then use `--skip-code-bucket-creation` argument. +- If the bucket for custom lambda code is already present in the current account, use the `--skip-code-bucket-creation` argument. - $ rdk init --skip-code-bucket-creation +```bash +rdk init --skip-code-bucket-creation +``` -- If you want rdk to create/update and upload the rdklib-layer for - you, then use `--generate-lambda-layer` argument. In supported - regions, rdk will deploy the layer using the Serverless Application - Repository, otherwise it will build a local lambda layer archive and - upload it for use. +- If you want rdk to create/update and upload the rdklib-layer for you, use the `--generate-lambda-layer` argument. In supported regions, rdk will deploy the layer using the Serverless Application Repository; otherwise it will build a local lambda layer archive and upload it for use. - $ rdk init --generate-lambda-layer +```bash +rdk init --generate-lambda-layer +``` -- If you want rdk to give a custom name to the lambda layer for you, - then use `--custom-layer-namer` argument. The Serverless Application - Repository currently cannot be used for custom lambda layers. +- If you want rdk to give a custom name to the lambda layer for you, use the `--custom-layer-name` argument. The Serverless Application Repository currently cannot be used for custom lambda layers. - $ rdk init --generate-lambda-layer --custom-layer-name +```bash +rdk init --generate-lambda-layer --custom-layer-name +``` ## Create Rules @@ -117,22 +125,24 @@ frequency for a periodic rule. This will add a new directory for the rule and populate it with several files, including a skeleton of your Lambda code. - $ rdk create MyRule --runtime python3.8 --resource-types AWS::EC2::Instance --input-parameters '{"desiredInstanceType":"t2.micro"}' - Running create! - Local Rule files created. +```bash +rdk create MyRule --runtime python3.8 --resource-types AWS::EC2::Instance --input-parameters '{"desiredInstanceType":"t2.micro"}' +Running create! +Local Rule files created. +``` On Windows it is necessary to escape the double-quotes when specifying input parameters, so the `--input-parameters` argument would instead look something like this: - '{\"desiredInstanceType\":\"t2.micro\"}' +`'{\"desiredInstanceType\":\"t2.micro\"}'` Note that you can create rules that use EITHER resource-types OR maximum-frequency, but not both. We have found that rules that try to be both event-triggered as well as periodic wind up being very complicated and so we do not recommend it as a best practice. 
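For comparison, a periodic rule omits `--resource-types` and supplies `--maximum-frequency` instead; a minimal sketch (the rule name and input parameters here are placeholders):

```bash
rdk create MyPeriodicRule --runtime python3.8 --maximum-frequency TwentyFour_Hours --input-parameters '{"desiredInstanceType":"t2.micro"}'
```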
-## Edit Rules Locally +### Edit Rules Locally Once you have created the rule, edit the python file in your rule directory (in the above example it would be `MyRule/MyRule.py`, but may @@ -141,23 +151,24 @@ runtime) to add whatever logic your Rule requires in the `evaluate_compliance` function. You will have access to the CI that was sent by Config, as well as any parameters configured for the Config Rule. Your function should return either a simple compliance status (one -of `COMPLIANT`, `NON_COMPLIANT`, or `NOT_APPLICABLE`), or if you\'re +of `COMPLIANT`, `NON_COMPLIANT`, or `NOT_APPLICABLE`), or if you're using the python or node runtimes you can return a JSON object with multiple evaluation responses that the RDK will send back to AWS Config. -An example would look like - for sg in response['SecurityGroups']: - evaluations.append( - { - 'ComplianceResourceType': 'AWS::EC2::SecurityGroup', - 'ComplianceResourceId': sg['GroupId'], - 'ComplianceType': 'COMPLIANT', - 'Annotation': 'This is an important note.', - 'OrderingTimestamp': str(datetime.datetime.now()) - }) - - - return evaluations +An example would look like: + +```python +for sg in response['SecurityGroups']: + evaluations.append( + { + 'ComplianceResourceType': 'AWS::EC2::SecurityGroup', + 'ComplianceResourceId': sg['GroupId'], + 'ComplianceType': 'COMPLIANT', + 'Annotation': 'This is an important note.', + 'OrderingTimestamp': str(datetime.datetime.now()) + }) +return evaluations +``` This is necessary for periodic rules that are not triggered by any CI change (which means the CI that is passed in will be null), and also for @@ -166,11 +177,13 @@ attaching annotations to your evaluation results. If you want to see what the JSON structure of a CI looks like for creating your logic, you can use - $ rdk sample-ci +```bash +rdk sample-ci +``` to output a formatted JSON document. -## Write and Run Unit Tests +### Write and Run Unit Tests If you are writing Config Rules using either of the Python runtimes there will be a `_test.py` file deployed along with your @@ -179,47 +192,51 @@ to the standard Python unittest framework (documented [here](https://docs.python.org/3/library/unittest.html)), which can be run using the `test-local` rdk command: - $ rdk test-local MyTestRule - Running local test! - Testing MyTestRule - Looking for tests in /Users/mborch/Code/rdk-dev/MyTestRule +```bash +rdk test-local MyTestRule +Running local test! +Testing MyTestRule +Looking for tests in /Users/mborch/Code/rdk-dev/MyTestRule - --------------------------------------------------------------------- +--------------------------------------------------------------------- - Ran 0 tests in 0.000s +Ran 0 tests in 0.000s - OK - +OK + +``` The test file includes setup for the MagicMock library that can be used to stub boto3 API calls if your rule logic will involve making API calls to gather additional information about your AWS environment. For some tips on how to do this, check out this blog post: - +[Mock Is Magic](https://sgillies.net/2017/10/19/mock-is-magic.html) -## Modify Rule +### Modify Rule If you need to change the parameters of a Config rule in your working directory you can use the `modify` command. Any parameters you specify will overwrite existing values, any that you do not specify will not be changed. - $ rdk modify MyRule --runtime python3.9 --maximum-frequency TwentyFour_Hours --input-parameters '{"desiredInstanceType":"t2.micro"}' - Running modify! - Modified Rule 'MyRule'. Use the `deploy` command to push your changes to AWS. 
+```bash +rdk modify MyRule --runtime python3.9 --maximum-frequency TwentyFour_Hours --input-parameters '{"desiredInstanceType":"t2.micro"}' +Running modify! +Modified Rule 'MyRule'. Use the `deploy` command to push your changes to AWS. +``` Again, on Windows the input parameters would look like: - '{\"desiredInstanceType\":\"t2.micro\"}' +`'{\"desiredInstanceType\":\"t2.micro\"}'` It is worth noting that until you actually call the `deploy` command your rule only exists in your working directory, none of the Rule commands discussed thus far actually makes changes to your account. -## Deploy Rule +### Deploy Rule Once you have completed your compliance validation code and set your -Rule\'s configuration, you can deploy the Rule to your account using the +Rule's configuration, you can deploy the Rule to your account using the `deploy` command. This will zip up your code (and the other associated code files, if any) into a deployable package (or run a gradle build if you have selected the java8 runtime or run the lambda packaging step @@ -232,46 +249,43 @@ the `deploy` command will also directly update the Lambda function for any subsequent deployments to make sure code changes are propagated correctly. - $ rdk deploy MyRule - Running deploy! - Zipping MyRule - Uploading MyRule - Creating CloudFormation Stack for MyRule - Waiting for CloudFormation stack operation to complete... - ... - Waiting for CloudFormation stack operation to complete... - Config deploy complete. +```bash +rdk deploy MyRule +Running deploy! +Zipping MyRule +Uploading MyRule +Creating CloudFormation Stack for MyRule +Waiting for CloudFormation stack operation to complete... +... +Waiting for CloudFormation stack operation to complete... +Config deploy complete. +``` The exact output will vary depending on Lambda runtime. You can use the `--all` flag to deploy all of the rules in your working directory. If you used the `--generate-lambda-layer` flag in rdk init, use the `--generated-lambda-layer` flag for rdk deploy. -## Deploy Organization Rule +### Deploy Organization Rule You can also deploy the Rule to your AWS Organization using the `deploy-organization` command. For successful evaluation of custom rules in child accounts, please make sure you do one of the following: -1. Set ASSUME_ROLE_MODE in Lambda code to True, to get the lambda to - assume the Role attached on the Config Service and confirm that the - role trusts the master account where the Lambda function is going to - be deployed. -2. Set ASSUME_ROLE_MODE in Lambda code to True, to get the lambda to - assume a custom role and define an optional parameter with key as - ExecutionRoleName and set the value to your custom role name; - confirm that the role trusts the master account of the organization - where the Lambda function will be deployed. - - $ rdk deploy-organization MyRule - Running deploy! - Zipping MyRule - Uploading MyRule - Creating CloudFormation Stack for MyRule - Waiting for CloudFormation stack operation to complete... - ... - Waiting for CloudFormation stack operation to complete... - Config deploy complete. +1. Set ASSUME_ROLE_MODE in Lambda code to True, to get the lambda to assume the Role attached on the Config Service and confirm that the role trusts the master account where the Lambda function is going to be deployed. +2. 
Set ASSUME_ROLE_MODE in Lambda code to True, to get the lambda to assume a custom role and define an optional parameter with key as ExecutionRoleName and set the value to your custom role name; confirm that the role trusts the master account of the organization where the Lambda function will be deployed. + +```bash +rdk deploy-organization MyRule +Running deploy! +Zipping MyRule +Uploading MyRule +Creating CloudFormation Stack for MyRule +Waiting for CloudFormation stack operation to complete... +... +Waiting for CloudFormation stack operation to complete... +Config deploy complete. +``` The exact output will vary depending on Lambda runtime. You can use the `--all` flag to deploy all of the rules in your working directory. This @@ -281,20 +295,21 @@ account. When an account leaves an organization, the rule is removed. Deployment of existing organizational AWS Config Rules will only be retried for 7 hours after an account is added to your organization if a recorder is not available. You are expected to create a recorder if one -doesn\'t exist within 7 hours of adding an account to your organization. +doesn't exist within 7 hours of adding an account to your organization. -## View Logs For Deployed Rule +### View Logs For Deployed Rule Once the Rule has been deployed to AWS you can get the CloudWatch logs associated with your lambda function using the `logs` command. - $ rdk logs MyRule -n 5 - 2017-11-15 22:59:33 - START RequestId: 96e7639a-ca15-11e7-95a2-b1521890638d Version: $LATEST - 2017-11-15 23:41:13 - REPORT RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda Duration: 0.50 ms Billed Duration: 100 ms Memory Size: 256 MB - Max Memory Used: 36 MB - 2017-11-15 23:41:13 - END RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda - 2017-11-15 23:41:13 - Default RDK utility class does not yet support Scheduled Notifications. - 2017-11-15 23:41:13 - START RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda Version: $LATEST +```bash +rdk logs MyRule -n 5 +2017-11-15 22:59:33 - START RequestId: 96e7639a-ca15-11e7-95a2-b1521890638d Version: $LATEST +2017-11-15 23:41:13 - REPORT RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda Duration: 0.50 ms Billed Duration: 100 ms Memory Size: 256 MB Max Memory Used: 36 MB +2017-11-15 23:41:13 - END RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda +2017-11-15 23:41:13 - Default RDK utility class does not yet support Scheduled Notifications. +2017-11-15 23:41:13 - START RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda Version: $LATEST +``` You can use the `-n` and `-f` command line flags just like the UNIX `tail` command to view a larger number of log events and to continuously @@ -302,7 +317,7 @@ poll for new events. The latter option can be useful in conjunction with manually initiating Config Evaluations for your deploy Config Rule to make sure it is behaving as expected. -# Running the tests +## Running the tests The `testing` directory contains scripts and buildspec files that I use to run basic functionality tests across a variety of CLI environments @@ -311,26 +326,26 @@ running python3.9). If there is interest I can release a CloudFormation template that could be used to build the test environment, let me know if this is something you want! -# Advanced Features +## Advanced Features -## Cross-Account Deployments +### Cross-Account Deployments Features have been added to the RDK to facilitate the cross-account deployment pattern that enterprise customers have standardized for custom Config Rules. 
A cross-account architecture is one in which the -Lambda functions are deployed to a single central \"Compliance\" account -(which may be the same as a central \"Security\" account), and the -Config Rules are deployed to any number of \"Satellite\" accounts that +Lambda functions are deployed to a single central "Compliance" account +(which may be the same as a central "Security" account), and the +Config Rules are deployed to any number of "Satellite" accounts that are used by other teams or departments. This gives the compliance team confidence that their rule logic cannot be tampered with and makes it much easier for them to modify rule logic without having to go through a complex deployment process to potentially hundreds of AWS accounts. The cross-account pattern uses two advanced RDK features -- Functions-only deployment -- create-rule-template command +- Functions-only deployment +- create-rule-template command -**Functions-Only Deployment** +#### Functions-Only Deployment By using the `-f` or `--functions-only` flag on the `deploy` command the RDK will deploy only the necessary Lambda Functions, Lambda Execution @@ -348,13 +363,13 @@ independently. The intended usage is to deploy the functions for all of the Config rules in the Security/Compliance account, which can be done simply by using `rdk deploy -f --all` from your working directory. -**create-rule-template command** +#### create-rule-template command This command generates a CloudFormation template that defines the AWS Config rules themselves, along with the Config Role, Config data bucket, Configuration Recorder, and Delivery channel necessary for the Config rules to work in a satellite account. You must specify the file name for -the generated template using the [\--output-file]{.title-ref} or +the generated template using the [--output-file]{.title-ref} or [o]{.title-ref} command line flags. The generated template takes a single parameter of the AccountID of the central compliance account that contains the Lambda functions that will back your custom Config Rules. @@ -366,44 +381,49 @@ standard arguments for selecting Rules to include in the generated template, including lists of individual Rule names, an `--all` flag, or using the RuleSets feature described below. - $ rdk create-rule-template -o remote-rule-template.json --all - Generating CloudFormation template! - CloudFormation template written to remote-rule-template.json +```bash +rdk create-rule-template -o remote-rule-template.json --all +Generating CloudFormation template! +CloudFormation template written to remote-rule-template.json +``` -## Disable the supported resource types check +### Disable the supported resource types check It is now possible to define a resource type that is not yet supported by rdk. To disable the supported resource check use the optional flag -\'\--skip-supported-resource-check\' during the create command. +'--skip-supported-resource-check' during the create command. - $ rdk create MyRule --runtime python3.8 --resource-types AWS::New::ResourceType --skip-supported-resource-check - 'AWS::New::ResourceType' not found in list of accepted resource types. - Skip-Supported-Resource-Check Flag set (--skip-supported-resource-check), ignoring missing resource type error. - Running create! - Local Rule files created. +```bash +rdk create MyRule --runtime python3.8 --resource-types AWS::New::ResourceType --skip-supported-resource-check +'AWS::New::ResourceType' not found in list of accepted resource types. 
+Skip-Supported-Resource-Check Flag set (--skip-supported-resource-check), ignoring missing resource type error. +Running create! +Local Rule files created. +``` -## Custom Lambda Function Name +### Custom Lambda Function Name As of version 0.7.14, instead of defaulting the lambda function names to `RDK-Rule-Function-` it is possible to customize the name for -the Lambda function to any 64 characters string as per Lambda\'s naming +the Lambda function to any 64 characters string as per Lambda's naming standards using the optional `--custom-lambda-name` flag while performing `rdk create`. This opens up new features like : -1. Longer config rule name. -2. Custom lambda function naming as per personal or enterprise - standards. +1. Longer config rule name. +2. Custom lambda function naming as per personal or enterprise standards. - $ rdk create MyLongerRuleName --runtime python3.8 --resource-types AWS::EC2::Instance --custom-lambda-name custom-prefix-for-MyLongerRuleName - Running create! - Local Rule files created. +```bash +rdk create MyLongerRuleName --runtime python3.8 --resource-types AWS::EC2::Instance --custom-lambda-name custom-prefix-for-MyLongerRuleName +Running create! +Local Rule files created. +``` The above example would create files with config rule name as `MyLongerRuleName` and lambda function with the name `custom-prefix-for-MyLongerRuleName` instead of `RDK-Rule-Function-MyLongerRuleName` -## RuleSets +### RuleSets New as of version 0.3.11, it is possible to add RuleSet tags to rules that can be used to deploy and test groups of rules together. Rules can @@ -411,7 +431,7 @@ belong to multiple RuleSets, and RuleSet membership is stored only in the parameters.json metadata. The [deploy]{.title-ref}, [create-rule-template]{.title-ref}, and [test-local]{.title-ref} commands are RuleSet-aware such that a RuleSet can be passed in as the -target instead of [\--all]{.title-ref} or a specific named Rule. +target instead of [--all]{.title-ref} or a specific named Rule. A comma-delimited list of RuleSets can be added to a Rule when you create it (using the `--rulesets` flag), as part of a `modify` command, @@ -421,23 +441,29 @@ from a RuleSet. Running `rdk rulesets list` will display a list of the RuleSets currently defined across all of the Rules in the working directory - rdk-dev $ rdk rulesets list - RuleSets: AnotherRuleSet MyNewSet +```bash +rdk rulesets list +RuleSets: AnotherRuleSet MyNewSet +``` Naming a specific RuleSet will list all of the Rules that are part of that RuleSet. - rdk-dev $ rdk rulesets list AnotherRuleSet - Rules in AnotherRuleSet : RSTest +```bash +rdk rulesets list AnotherRuleSet +Rules in AnotherRuleSet : RSTest +``` Rules can be added to or removed from RuleSets using the `add` and `remove` subcommands: - rdk-dev $ rdk rulesets add MyNewSet RSTest - RSTest added to RuleSet MyNewSet +```bash +rdk rulesets add MyNewSet RSTest +RSTest added to RuleSet MyNewSet - rdk-dev $ rdk rulesets remove AnotherRuleSet RSTest - RSTest removed from RuleSet AnotherRuleSet +rdk rulesets remove AnotherRuleSet RSTest +RSTest removed from RuleSet AnotherRuleSet +``` RuleSets are a convenient way to maintain a single repository of Config Rules that may need to have subsets of them deployed to different @@ -446,7 +472,7 @@ of the Rules that you run in Production but not all of them; RuleSets gives you a way to identify and selectively deploy the appropriate Rules to each environment. -## Managed Rules +### Managed Rules The RDK is able to deploy AWS Managed Rules. 
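A sketch of a managed-rule create follows; the `--source-identifier` flag name is an assumption here (confirm it with `rdk create --help`), and the identifier value is the GuardDuty example discussed below:

```bash
rdk create GuardDutyEnabled --source-identifier GUARDDUTY_ENABLED_CENTRALIZED --maximum-frequency TwentyFour_Hours
```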
@@ -456,15 +482,15 @@ Managed Rules can be found [here](https://docs.aws.amazon.com/config/latest/developerguide/managed-rules-by-aws-config.html) , and note that the Identifier can be obtained by replacing the dashes with underscores and using all capitals (for example, the -\"guardduty-enabled-centralized\" rule has the SourceIdentifier -\"GUARDDUTY_ENABLED_CENTRALIZED\"). Just like custom Rules you will need +"guardduty-enabled-centralized" rule has the SourceIdentifier +"GUARDDUTY_ENABLED_CENTRALIZED"). Just like custom Rules you will need to specify source events and/or a maximum evaluation frequency, and also pass in any Rule parameters. The resulting Rule directory will contain only the parameters.json file, but using `rdk deploy` or `rdk create-rule-template` can be used to deploy the Managed Rule like any other Custom Rule. -## Deploying Rules Across Multiple Regions +### Deploying Rules Across Multiple Regions The RDK is able to run init/deploy/undeploy across multiple regions with a `rdk -f -t ` @@ -477,7 +503,7 @@ specify the filename, add the `-o ` this will create a region set with the following tests and regions `"default":["us-east-1","us-west-1","eu-north-1","ap-east-1"],"aws-cn-region-set":["cn-north-1","cn-northwest-1"]` -## Using RDK to Generate a Lambda Layer in a region (Python3) +### Using RDK to Generate a Lambda Layer in a region (Python3) By default `rdk init --generate-lambda-layer` will generate an rdklib lambda layer while running init in whatever region it is run, to force @@ -492,7 +518,7 @@ If you created layer with a custom name (by running `rdk init --custom-lambda-layer`, add a similar `custom-lambda-layer` flag when running deploy. -# Support & Feedback +## Support & Feedback This project is maintained by AWS Solution Architects and Consultants. It is not part of an AWS service and support is provided best-effort by @@ -500,45 +526,37 @@ the maintainers. To post feedback, submit feature ideas, or report bugs, please use the [Issues section](https://github.com/awslabs/aws-config-rdk/issues) of this repo. -# Contributing +## Contributing email us at if you have any questions. We are happy to help and discuss. 
-# Contacts - -- **Ricky Chau** - [rickychau2780](https://github.com/rickychau2780) - - *current maintainer* -- **Benjamin Morris** - - [bmorrissirromb](https://github.com/bmorrissirromb) - *current - maintainer* -- **Mark Beacom** - [mbeacom](https://github.com/mbeacom) - *current - maintainer* -- **Julio Delgado Jr** - [tekdj7](https://github.com/tekdj7) - - *current maintainer* - -# Past Contributors - -- **Michael Borchert** - *Original Python version* -- **Jonathan Rault** - *Original Design, testing, feedback* -- **Greg Kim and Chris Gutierrez** - *Initial work and CI definitions* -- **Henry Huang** - *Original CFN templates and other code* -- **Santosh Kumar** - *maintainer* -- **Jose Obando** - *maintainer* -- **Jarrett Andrulis** - - [jarrettandrulis](https://github.com/jarrettandrulis) - *maintainer* -- **Sandeep Batchu** - [batchus](https://github.com/batchus) - - *maintainer* - -# License +## Contacts + +- **Benjamin Morris** - [bmorrissirromb](https://github.com/bmorrissirromb) - *current maintainer* +- **Julio Delgado Jr** - [tekdj7](https://github.com/tekdj7) - *current maintainer* + +## Past Contributors + +- **Michael Borchert** - *Original Python version* +- **Jonathan Rault** - *Original Design, testing, feedback* +- **Greg Kim and Chris Gutierrez** - *Initial work and CI definitions* +- **Henry Huang** - *Original CFN templates and other code* +- **Santosh Kumar** - *maintainer* +- **Jose Obando** - *maintainer* +- **Jarrett Andrulis** - [jarrettandrulis](https://github.com/jarrettandrulis) - *maintainer* +- **Sandeep Batchu** - [batchus](https://github.com/batchus) - *maintainer* +- **Mark Beacom** - [mbeacom](https://github.com/mbeacom) - *maintainer* +- **Ricky Chau** - [rickychau2780](https://github.com/rickychau2780) - *maintainer* + +## License This project is licensed under the Apache 2.0 License -# Acknowledgments +## Acknowledgments -- the boto3 team makes all of this magic possible. +- the boto3 team makes all of this magic possible. -# Link +## Link -- to view example of rules built with the RDK: - +- to view example of rules built with the RDK: [https://github.com/awslabs/aws-config-rules/tree/master/python](https://github.com/awslabs/aws-config-rules/tree/master/python) diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index 404b1e58..00000000 --- a/docs/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -SPHINXPROJ = RDK -SOURCEDIR = . -BUILDDIR = _build - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
-%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/docs/_static/argparse.css deleted file mode 100644 index 0c679a52..00000000 --- a/docs/_static/argparse.css +++ /dev/null @@ -1,13 +0,0 @@ -/* override table width restrictions */ -/*@media screen and (min-width: 767px) { - .wy-table-responsive table td { - white-space: normal !important; - } - .wy-table-responsive { - overflow: visible !important; - } -} - -.highlight { - overflow: visible !important -}*/ diff --git a/docs/commands/clean.md new file mode 100644 index 00000000..beae816b --- /dev/null +++ b/docs/commands/clean.md @@ -0,0 +1,7 @@ +# Clean + +The `clean` command is the inverse of the `init` command, and can be +used to completely remove Config resources from an account, including +the Configuration Recorder, Delivery Channel, S3 buckets, Roles, and +Permissions. This is useful for testing account provisioning automation +and for running automated tests in a clean environment. diff --git a/docs/commands/create-rule-template.md new file mode 100644 index 00000000..4a119a5c --- /dev/null +++ b/docs/commands/create-rule-template.md @@ -0,0 +1,28 @@ +# Create-Rule-Template + +Generates and saves to a file a single CloudFormation template that can +be used to deploy the specified Rule(s) into any account. This feature +has two primary uses: + +- Multi-account Config setup in which the Lambda Functions for custom rules are deployed into a centralized "security" or "compliance" account and the Config Rules themselves are deployed into "application" or "satellite" accounts. +- Combine many Config Rules into a single CloudFormation template for easier atomic deployment and management. + +The generated CloudFormation template includes a Parameter for the +AccountID that contains the Lambda functions that provide the compliance +logic for the Rules, and also exposes all of the Config Rule input +parameters as CloudFormation stack parameters. + +By default the generated CloudFormation template will set up Config as +per the settings used by the RDK `init` command, but those resources can +be omitted using the `--rules-only` flag. + +The `--config-role-arn` flag can be used to assign an existing Config +role to the created Configuration Recorder. The +`-t | --tag-config-rules-script` flag can now be used to output +the generated script that creates tags for each Config rule. + +As of version 0.6, RDK supports Config remediation. Note that in order +to use SSM documents for remediation you must supply all of the +necessary document parameters. These can be found in the SSM document +listing on the AWS console, but RDK will *not* validate at rule creation +that you have all of the necessary parameters supplied. diff --git a/docs/commands/create.md new file mode 100644 index 00000000..a9a205fc --- /dev/null +++ b/docs/commands/create.md @@ -0,0 +1,7 @@ +# Create + +As of version 0.6, RDK supports Config remediation. Note that in order +to use SSM documents for remediation you must supply all of the +necessary document parameters. These can be found in the SSM document +listing on the AWS console, but RDK will *not* validate at rule creation +that you have all of the necessary parameters supplied.
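A minimal `create` invocation looks like the following (the rule name, resource type, and parameter values are placeholders, and remediation-specific flags are omitted):

```bash
rdk create MyRule --runtime python3.8 --resource-types AWS::EC2::Instance --input-parameters '{"desiredInstanceType":"t2.micro"}'
```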
diff --git a/docs/commands/deploy.md new file mode 100644 index 00000000..43c9f8f9 --- /dev/null +++ b/docs/commands/deploy.md @@ -0,0 +1,47 @@ +# Deploy + +This command will deploy the specified Rule(s) to the Account and Region +determined by the credentials being used to execute the command, and the +value of the AWS_DEFAULT_REGION environment variable, unless those +credentials or region are overridden using the common flags. + +Once deployed, RDK will **not** explicitly start a Rule evaluation. +Depending on the changes being made to your Config Rule setup, AWS Config +may re-evaluate the deployed Rules automatically, or you can run an +evaluation using the AWS configservice CLI. + +The `--functions-only` flag can be used as part of a multi-account +deployment strategy to push **only** the Lambda functions (and +necessary Roles and Permissions) to the target account. This is intended +to be used in conjunction with the `create-rule-template` command in +order to separate the compliance logic from the evaluated accounts. For +an example of how this looks in practice, check out the [AWS +Compliance-as-Code +Engine](https://github.com/awslabs/aws-config-engine-for-compliance-as-code/). +The `--rdklib-layer-arn` flag can be used to attach a Lambda Layer ARN +that contains the desired rdklib. Note that Lambda Layers are +region-specific. The `--lambda-role-arn` flag can be used to assign an +existing IAM role to all Lambda functions created for Custom Config +Rules. The `--lambda-layers` flag can be used to attach a +comma-separated list of Lambda Layer ARNs to deploy with your Lambda +function(s). The `--lambda-subnets` flag can be used to attach a +comma-separated list of Subnets to deploy your Lambda function(s). The +`--lambda-security-groups` flag can be used to attach a +comma-separated list of Security Groups to deploy with your Lambda +function(s). The `--custom-code-bucket` flag can be used to provide +a custom code S3 bucket name, one not created by rdk init, for +generated CloudFormation template storage. The `--boundary-policy-arn` +flag can be used to attach a boundary Policy ARN that will be added to +rdkLambdaRole. The `--lambda-timeout` flag can be used to specify +the timeout for the Lambda function. + +Note: Behind the scenes the `--functions-only` flag generates a +CloudFormation template and runs a "create" or "update" on the +targeted AWS Account and Region. If subsequent calls to `deploy` with +the `--functions-only` flag are made with the same stack name (either +the default or otherwise) but with *different Config rules targeted*, +any Rules deployed in previous `deploy`s but not included in the latest +`deploy` will be removed. After a functions-only `deploy` **only** the +Rules specifically targeted by that command (either through Rulesets or +an explicit list supplied on the command line) will be deployed in the +environment; all others will be removed. diff --git a/docs/commands/export.md new file mode 100644 index 00000000..2165b48e --- /dev/null +++ b/docs/commands/export.md @@ -0,0 +1,19 @@ +# Export + +This command will export the specified Rule(s) to a Terraform file; it +supports Terraform versions 0.11 and 0.12. + +The `--format` flag can be used to specify the export format; currently it +supports only Terraform. The `--version` flag can be used to specify the +Terraform version. 
The `--rdklib-layer-arn` flag can be used to +attach a Lambda Layer ARN that contains the desired rdklib. Note that +Lambda Layers are region-specific. The `--lambda-role-arn` flag can be +used to assign an existing IAM role to all Lambda functions created for +Custom Config Rules. The `--lambda-layers` flag can be used to +attach a comma-separated list of Lambda Layer ARNs to deploy with +your Lambda function(s). The `--lambda-subnets` flag can be used to +attach a comma-separated list of Subnets to deploy your Lambda +function(s). The `--lambda-security-groups` flag can be used to +attach a comma-separated list of Security Groups to deploy with your +Lambda function(s). The `--lambda-timeout` flag can be used to +specify the timeout for the Lambda function. diff --git a/docs/commands/init.md new file mode 100644 index 00000000..4292ebab --- /dev/null +++ b/docs/commands/init.md @@ -0,0 +1,27 @@ +# Init + +Sets up the AWS Config Service in an AWS Account. This includes: + +- Config Configuration Recorder +- Config Delivery Channel +- IAM Role for Delivery Channel +- S3 Bucket for Configuration Snapshots +- S3 Bucket for Lambda Code + +Additionally, `init` will make sure that the Configuration Recorder is +on and functioning, that the Delivery Channel has the appropriate Role +attached, and that the Delivery Channel Role has the proper permissions. + +Note: Even without Config Rules running, the Configuration Recorder is +still capturing Configuration Item snapshots and storing them in S3, so +running `init` will incur AWS charges! + +Also Note: AWS Config is a regional service, so running `init` will only +set up Config in the region currently specified in your +AWS_DEFAULT_REGION environment variable or in the `--region` flag. + +Advanced Options: + +- `--config-bucket-exists-in-another-account`: [optional] If the bucket being used by a Config Delivery Channel exists in another account, it is possible to skip the check that the bucket exists. This is useful when using `init` to initialize AWS Config in an account which already has a delivery channel set up with a central bucket. Currently, the rdk lists all the buckets within the account you are running `init` from to check whether the provided bucket name exists; if it doesn't, it will create it. This presents an issue when a Config Delivery Channel has been configured to push configuration recordings to a central bucket: the bucket will never be found because it doesn't exist in the same account, but it cannot be created because bucket names have to be globally unique. +- `--skip-code-bucket-creation`: [optional] If you want to use a custom code bucket for rdk, enable this and pass the `--custom-code-bucket` flag to `rdk deploy`. +- `--control-tower`: [optional] If your account is part of an AWS Control Tower setup, `--control-tower` will skip the setup of the configuration_recorder and delivery_channel. diff --git a/docs/commands/logs.md new file mode 100644 index 00000000..6c8e2033 --- /dev/null +++ b/docs/commands/logs.md @@ -0,0 +1,14 @@ +# Logs + +The `logs` command provides a shortcut to accessing the CloudWatch Logs +output from the Lambda Functions that back your custom Config Rules. +Logs are displayed in chronological order going back the number of log +entries specified by the `--number` flag (default 3). 
It supports a +`--follow` flag similar to the UNIX command `tail` so that you can +choose to continually poll CloudWatch for new log items as they +are delivered by your Lambda function. + +In addition to any output that your function emits via `print()` or +`console.log()` commands, Lambda will also record log lines for the +start and stop of each Lambda invocation, including the runtime and +memory usage. diff --git a/docs/commands/modify.md new file mode 100644 index 00000000..98b0a528 --- /dev/null +++ b/docs/commands/modify.md @@ -0,0 +1,7 @@ +# Modify + +Used to modify the local metadata for Config Rules created by the RDK. +This command takes the same arguments as the `create` command (all of +them optional), and overwrites the Rule metadata for any flag specified. +Changes made using `modify` are not automatically pushed out to your AWS +Account, and must be deployed as usual using the `deploy` command. diff --git a/docs/commands/rulesets.md new file mode 100644 index 00000000..c4fc6743 --- /dev/null +++ b/docs/commands/rulesets.md @@ -0,0 +1,27 @@ +# Rulesets + +Rulesets provide a mechanism to tag individual Config Rules into groups +that can be acted on as a unit. Ruleset tags are single keywords, and +the commands `deploy`, `create-rule-template`, and `undeploy` can all +expand Ruleset parameters and operate on the resulting list of Rules. + +The most common use-case for Rulesets is to define standardized Account +metadata or data classifications, and then tag individual Rules to all +of the appropriate metadata tags or classification levels. + +Example: If you have Account classifications of "Public", "Private", +and "Restricted", you can tag all of your Rules as "Restricted", and +a subset of them that deal with private network security as "Private". +Then, when you need to deploy controls to a new "Private" account, you +can simply use `rdk create-rule-template --rulesets Private` to generate +a CloudFormation template that includes all of the Rules necessary for +your "Private" classification, but omits the Rules that are only +necessary for "Restricted" accounts. Additionally, as your compliance +requirements change and you add Config Rules, you can tag them as +appropriate, re-generate your CloudFormation templates, and re-deploy to +make sure your Accounts are all up-to-date. + +You may also choose to classify accounts using binary attributes +("Prod" vs. "Non-Prod" or "PCI" vs. "Non-PCI"), and then +generate account-specific CloudFormation templates using the Account +metadata to ensure that the appropriate controls are deployed. diff --git a/docs/commands/sample-ci.md new file mode 100644 index 00000000..eaf93d81 --- /dev/null +++ b/docs/commands/sample-ci.md @@ -0,0 +1,17 @@ +# Sample-CI + +This utility command outputs a sample Configuration Item for the +specified resource type. This can be useful when writing new custom +Config Rules to help developers know what the CI structure and plausible +values for the resource type are. + +Note that you can construct Config Evaluations for any resource type +that is supported by CloudFormation; however, you cannot create +change-triggered Config Rules for resource types not explicitly +supported by Config, and some of the console functionality in AWS Config +may be limited. 
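For example, dumping a sample CI for an EC2 instance looks roughly like this (the resource type shown is just an illustration; pass whichever supported type you are writing a rule for):

```bash
rdk sample-ci AWS::EC2::Instance
```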
+ +[CFN-supported +resources](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/cfn-supported-resources.html) +[Config-supported +resources](https://docs.aws.amazon.com/config/latest/developerguide/resource-config-reference.html) diff --git a/docs/commands/test-local.md b/docs/commands/test-local.md new file mode 100644 index 00000000..6ece0657 --- /dev/null +++ b/docs/commands/test-local.md @@ -0,0 +1,9 @@ +# Test-Local + +Shorthand command for running the unit tests defined for Config Rules +that use a Python runtime. When a Python 3.7+ Rule is created using the +`create` command a unit test template is created in the Rule directory. +This test boilerplate includes minimal tests, as well as a framework for +using the `unittest.mock` library for stubbing out Boto3 calls. This +allows more sophisticated test cases to be written for Periodic rules +that need to make API calls to gather information about the environment. diff --git a/docs/commands/undeploy.md b/docs/commands/undeploy.md new file mode 100644 index 00000000..83922f4e --- /dev/null +++ b/docs/commands/undeploy.md @@ -0,0 +1,10 @@ +# Undeploy + +The inverse of `deploy`, this command is used to remove a Config Rule +and its Lambda Function from the targeted account. + +This is intended to be used primarily for clean-up for testing +deployment automation (perhaps from a CI/CD pipeline) to ensure that it +works from an empty account, or to clean up a test account during +development. See also the [clean](./clean.html) command if you want to +more thoroughly scrub Config from your account. diff --git a/docs/conf.py b/docs/conf.py deleted file mode 100644 index 5c608007..00000000 --- a/docs/conf.py +++ /dev/null @@ -1,179 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file does only contain a selection of the most common options. For a -# full list see the documentation: -# http://www.sphinx-doc.org/en/master/config - -# -- Path setup -------------------------------------------------------------- - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -import sys - -sys.path.insert(0, os.path.abspath(".")) -sys.path.insert(0, os.path.abspath("../")) -sys.path.insert(0, os.path.abspath("../rdk/")) - - -# -- Project information ----------------------------------------------------- - -project = "RDK" -copyright = "2017-2023 Amazon.com, Inc. or its affiliates. All Rights Reserved" -author = "RDK Maintainers" - -# The short X.Y version -version = "" -# The full version, including alpha/beta/rc tags -release = "1.0" - -on_rtd = os.environ.get("READTHEDOCS", None) == "True" - -if not on_rtd: # only import and set the theme if we're building docs locally, tested with sphinx-rtd-theme==0.4.3 - import sphinx_rtd_theme - - html_theme = "sphinx_rtd_theme" - html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] - -# -- General configuration --------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. -# -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. 
-extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.intersphinx", - "sphinx.ext.todo", - "sphinx.ext.viewcode", - "sphinx.ext.githubpages", - "sphinxarg.ext", -] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# -# source_suffix = ['.rst', '.md'] -source_suffix = ".md" - -# The master toctree document. -master_doc = "index" - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path . -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -# html_theme = 'default' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# -# html_theme_options = {} - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -html_context = { - "css_files": [ - "_static/theme_overrides.css", # override wide tables in RTD theme - ], -} - -# Custom sidebar templates, must be a dictionary that maps document names -# to template names. -# -# The default sidebars (for documents that don't match any pattern) are -# defined by theme itself. Builtin themes are using these templates by -# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', -# 'searchbox.html']``. -# -# html_sidebars = {} - - -# -- Options for HTMLHelp output --------------------------------------------- - -# Output file base name for HTML help builder. -htmlhelp_basename = "RDKdoc" - - -# -- Options for LaTeX output ------------------------------------------------ - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # - # 'preamble': '', - # Latex figure (float) alignment - # - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, "RDK.tex", "RDK Documentation", "RDK Maintainers", "manual"), -] - - -# -- Options for manual page output ------------------------------------------ - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [(master_doc, "rdk", "RDK Documentation", [author], 1)] - - -# -- Options for Texinfo output ---------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, "RDK", "RDK Documentation", author, "RDK", "One line description of project.", "Miscellaneous"), -] - - -# -- Extension configuration ------------------------------------------------- - -# -- Options for intersphinx extension --------------------------------------- - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = {"https://docs.python.org/": None} - -# -- Options for todo extension ---------------------------------------------- - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True diff --git a/docs/getting_started.rst b/docs/getting_started.rst deleted file mode 100644 index 4ae573cd..00000000 --- a/docs/getting_started.rst +++ /dev/null @@ -1,262 +0,0 @@ -Getting Started -=============== - -Let's get started using the RDK! - -Prerequisites -------------- - -RDK uses python 3.7+. You will need to have an AWS account and sufficient permissions to manage the Config service, and to create and manage S3 Buckets, Roles, and Lambda Functions. An AWS IAM Policy Document that describes the minimum necessary permissions can be found `here `_ on github. - -Under the hood, rdk uses boto3 to make API calls to AWS, so you can set your credentials any way that boto3 recognizes (options 3 through 8 in the `boto docs here `_ ) or pass them in with the command-line parameters --profile, --region, --access-key-id, or --secret-access-key - -.. _permissions: http://www.python.org/ - -Installation ------------- - -If you just want to use the RDK, go ahead and install it using pip:: - -$ pip install rdk - -Alternately, if you want to see the code and/or contribute you can clone the `git repo `_ , and then from the repo directory use pip to install the package. Use the '-e' flag to generate symlinks so that any edits you make will be reflected when you run the installed package. - -If you are going to author your Lambda functions using Java you will need to have Java 8 and gradle installed. If you are going to author your Lambda functions in C# you will need to have the dotnet CLI and the .NET Core Runtime 1.08 installed. -:: - - $ pip install -e . - -To make sure the rdk is installed correctly, running the package from the command line without any arguments should display help information. - -:: - - $ rdk - usage: rdk [-h] [-p PROFILE] [-k ACCESS_KEY] [-s SECRET_ACCESS_KEY] - [-r REGION] - ... - rdk: error: the following arguments are required: , - - -Usage ------ - -Configure your env -~~~~~~~~~~~~~~~~~~ -To use the RDK, it's recommended to create a directory that will be your working directory. This should be committed to a source code repo, and ideally created as a python virtualenv. In that directory, run the ``init`` command to set up your AWS Config environment. - -:: - - $ rdk init - Running init! - Creating Config bucket config-bucket-780784666283 - Creating IAM role config-role - Waiting for IAM role to propagate - Config Service is ON - Config setup complete. - Creating Code bucket config-rule-code-bucket-780784666283ap-southeast-1 - -Running ``init`` subsequent times will validate your AWS Config setup and re-create any S3 buckets or IAM resources that are needed. - -Create Rules -~~~~~~~~~~~~ -In your working directory, use the ``create`` command to start creating a new custom rule. 
You must specify the runtime for the lambda function that will back the Rule, and you can also specify a resource type (or comma-separated list of types) that the Rule will evaluate or a maximum frequency for a periodic rule. This will add a new directory for the rule and populate it with several files, including a skeleton of your Lambda code. - -:: - - $ rdk create MyRule --runtime python3.8 --resource-types AWS::EC2::Instance --input-parameters '{"desiredInstanceType":"t2.micro"}' - Running create! - Local Rule files created. - -On Windows it is necessary to escape the double-quotes when specifying input parameters, so the `--input-parameters` argument would instead look something like this:: - - '{\"desiredInstanceType\":\"t2.micro\"}' - -Note that you can create rules that use EITHER resource-types OR maximum-frequency, but not both. We have found that rules that try to be both event-triggered as well as periodic wind up being very complicated and so we do not recommend it as a best practice. - -Edit Rules Locally -~~~~~~~~~~~~~~~~~~ -Once you have created the rule, edit the python file in your rule directory (in the above example it would be ``MyRule/MyRule.py``, but may be deeper into the rule directory tree depending on your chosen Lambda runtime) to add whatever logic your Rule requires in the ``evaluate_compliance`` function. You will have access to the CI that was sent by Config, as well as any parameters configured for the Config Rule. Your function should return either a simple compliance status (one of ``COMPLIANT``, ``NONCOMPLIANT``, or ``NOT_APPLICABLE``), or if you're using the python or node runtimes you can return a JSON object with multiple evaluation responses that the RDK will send back to AWS Config. An example would look like:: - - for sg in response['SecurityGroups']: - evaluations.append( - { - 'ComplianceResourceType': 'AWS::EC2::SecurityGroup', - 'ComplianceResourceId': sg['GroupId'], - 'ComplianceType': 'COMPLIANT', - 'Annotation': 'This is an important note.', - 'OrderingTimestamp': str(datetime.datetime.now()) - }) - - - return evaluations - -This is necessary for periodic rules that are not triggered by any CI change (which means the CI that is passed in will be null), and also for attaching annotations to your evaluation results. - -If you want to see what the JSON structure of a CI looks like for creating your logic, you can use - -:: - -$ rdk sample-ci - -to output a formatted JSON document. - -Write and Run Unit Tests -~~~~~~~~~~~~~~~~~~~~~~~~ -If you are writing Config Rules using either of the Python runtimes there will be a _test.py file deployed along with your Lambda function skeleton. This can be used to write unit tests according to the standard Python unittest framework (documented here: https://docs.python.org/3/library/unittest.html), which can be run using the `test-local` rdk command:: - - $ rdk test-local MyTestRule - Running local test! - Testing MyTestRule - Looking for tests in /Users/mborch/Code/rdk-dev/MyTestRule - - --------------------------------------------------------------------- - - Ran 0 tests in 0.000s - - OK - - -The test file includes setup for the MagicMock library that can be used to stub boto3 API calls if your rule logic will involve making API calls to gather additional information about your AWS environment. 
For some tips on how to do this, check out this blog post: https://sgillies.net/2017/10/19/mock-is-magic.html - -Modify Rule -~~~~~~~~~~~ -If you need to change the parameters of a Config rule in your working directory you can use the ``modify`` command. Any parameters you specify will overwrite existing values, any that you do not specify will not be changed. - -:: - - $ rdk modify MyRule --runtime python3.9 --maximum-frequency TwentyFour_Hours --input-parameters '{"desiredInstanceType":"t2.micro"}' - Running modify! - Modified Rule 'MyRule'. Use the `deploy` command to push your changes to AWS. - -Again, on Windows the input parameters would look like:: - - '{\"desiredInstanceType\":\"t2.micro\"}' - -It is worth noting that until you actually call the ``deploy`` command your rule only exists in your working directory, none of the Rule commands discussed thus far actually makes changes to your account. - -Deploy Rule -~~~~~~~~~~~ -Once you have completed your compliance validation code and set your Rule's configuration, you can deploy the Rule to your account using the ``deploy`` command. This will zip up your code (and the other associated code files, if any) into a deployable package (or run a gradle build if you have selected the java8 runtime or run the lambda packaging step from the dotnet CLI if you have selected the dotnetcore1.0 runtime), copy that zip file to S3, and then launch or update a CloudFormation stack that defines your Config Rule, Lambda function, and the necessary permissions and IAM Roles for it to function. Since CloudFormation does not deeply inspect Lambda code objects in S3 to construct its changeset, the ``deploy`` command will also directly update the Lambda function for any subsequent deployments to make sure code changes are propagated correctly. - -:: - - $ rdk deploy MyRule - Running deploy! - Zipping MyRule - Uploading MyRule - Creating CloudFormation Stack for MyRule - Waiting for CloudFormation stack operation to complete... - ... - Waiting for CloudFormation stack operation to complete... - Config deploy complete. - -The exact output will vary depending on Lambda runtime. You can use the --all flag to deploy all of the rules in your working directory. - -View Logs For Deployed Rule -~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Once the Rule has been deployed to AWS you can get the CloudWatch logs associated with your lambda function using the ``logs`` command. - -:: - - $ rdk logs MyRule -n 5 - 2017-11-15 22:59:33 - START RequestId: 96e7639a-ca15-11e7-95a2-b1521890638d Version: $LATEST - 2017-11-15 23:41:13 - REPORT RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda Duration: 0.50 ms Billed Duration: 100 ms Memory Size: 256 MB - Max Memory Used: 36 MB - 2017-11-15 23:41:13 - END RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda - 2017-11-15 23:41:13 - Default RDK utility class does not yet support Scheduled Notifications. - 2017-11-15 23:41:13 - START RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda Version: $LATEST - -You can use the ``-n`` and ``-f`` command line flags just like the UNIX ``tail`` command to view a larger number of log events and to continuously poll for new events. The latter option can be useful in conjunction with manually initiating Config Evaluations for your deploy Config Rule to make sure it is behaving as expected. 
- - -Advanced Features ------------------ -Cross-Account Deployments -~~~~~~~~~~~~~~~~~~~~~~~~~ -Features have been added to the RDK to facilitate the cross-account deployment pattern that enterprise customers have standardized on for custom Config Rules. A cross-account architecture is one in which the Lambda functions are deployed to a single central "Compliance" account (which may be the same as a central "Security" account), and the Config Rules are deployed to any number of "Satellite" accounts that are used by other teams or departments. This gives the compliance team confidence that their Rule logic cannot be tampered with and makes it much easier for them to modify rule logic without having to go through a complex deployment process to potentially hundreds of AWS accounts. The cross-account pattern uses two advanced RDK features - functions-only deployments and the `create-rule-template` command. - -**Function-Only Deployment** - -By using the `-f` or `--functions-only` flag on the `deploy` command the RDK will deploy only the necessary Lambda Functions, Lambda Execution Role, and Lambda Permissions to the account specified by the execution credentials. It accomplishes this by batching up all of the Lambda function CloudFormation snippets for the selected Rule(s) into a single dynamically generated template and deploy that CloudFormation template. One consequence of this is that subsequent deployments that specify a different set of Rules for the same stack name will update that CloudFormation stack, and any Rules that were included in the first deployment but not in the second will be removed. You can use the `--stack-name` parameter to override the default CloudFormation stack name if you need to manage different subsets of your Lambda Functions independently. The intended usage is to deploy the functions for all of the Config rules in the Security/Compliance account, which can be done simply by using `rdk deploy -f --all` from your working directory. - -**`create-rule-template` command** - -This command generates a CloudFormation template that defines the AWS Config rules themselves, along with the Config Role, Config data bucket, Configuration Recorder, and Delivery channel necessary for the Config rules to work in a satellite account. You must specify the file name for the generated template using the `--output-file` or `o` command line flags. The generated template takes a single parameter of the AccountID of the central compliance account that contains the Lambda functions that will back your custom Config Rules. The generated template can be deployed in the desired satellite accounts through any of the means that you can deploy any other CloudFormation template, including the console, the CLI, as a CodePipeline task, or using StackSets. The `create-rule-template` command takes all of the standard arguments for selecting Rules to include in the generated template, including lists of individual Rule names, an `--all` flag, or using the RuleSets feature described below. - -:: - - $ rdk create-rule-template -o remote-rule-template.json --all - Generating CloudFormation template! - CloudFormation template written to remote-rule-template.json - - -RuleSets -~~~~~~~~ -New as of version 0.3.11, it is possible to add RuleSet tags to rules that can be used to deploy and test groups of rules together. Rules can belong to multiple RuleSets, and RuleSet membership is stored only in the parameters.json metadata. 
The `deploy`, `create-rule-template`, and `test-local` commands are RuleSet-aware such that a RuleSet can be passed in as the target instead of `--all` or a specific named Rule. - -A comma-delimited list of RuleSets can be added to a Rule when you create it (using the `--rulesets` flag), as part of a `modify` command, or using new `ruleset` subcommands to add or remove individual rules from a RuleSet. - -Running `rdk rulesets list` will display a list of the RuleSets currently defined across all of the Rules in the working directory - -:: - - rdk-dev $ rdk rulesets list - RuleSets: AnotherRuleSet MyNewSet - -Naming a specific RuleSet will list all of the Rules that are part of that RuleSet. - -:: - - rdk-dev $ rdk rulesets list AnotherRuleSet - Rules in AnotherRuleSet : RSTest - -Rules can be added to or removed from RuleSets using the `add` and `remove` subcommands: - -:: - - rdk-dev $ rdk rulesets add MyNewSet RSTest - RSTest added to RuleSet MyNewSet - - rdk-dev $ rdk rulesets remove AnotherRuleSet RSTest - RSTest removed from RuleSet AnotherRuleSet - -RuleSets are a convenient way to maintain a single repository of Config Rules that may need to have subsets of them deployed to different environments. For example your development environment may contain some of the Rules that you run in Production but not all of them; RuleSets gives you a way to identify and selectively deploy the appropriate Rules to each environment. - - -Region Sets -~~~~~~~~~~~ -`rdk init`, `rdk deploy`, and `rdk undeploy` subcommands now support running across multiple regions in parallel using region sets defined in a yaml file. - -To run a subcommand with a region set, pass in the region set yaml file and the specific region set to run through. - -:: - - $ rdk -f regions.yaml --region-set region-set-1 undeploy CUSTOM_RULE - Deleting rules in the following regions: ['sa-east-1', 'us-east-1']. - Delete specified Rules and Lambda Functions from your AWS Account? (y/N): y - [sa-east-1] Running un-deploy! - [us-east-1] Running un-deploy! - [us-east-1] Rule removal initiated. Waiting for Stack Deletion to complete. - [sa-east-1] Rule removal initiated. Waiting for Stack Deletion to complete. - [us-east-1] CloudFormation stack operation complete. - [us-east-1] Rule removal complete, but local files have been preserved. - [us-east-1] To re-deploy, use the 'deploy' command. - [sa-east-1] CloudFormation stack operation complete. - [sa-east-1] Rule removal complete, but local files have been preserved. - [sa-east-1] To re-deploy, use the 'deploy' command. - -Example region set file: - -:: - - default: - - us-west-1 - - us-west-2 - region-set-1: - - sa-east-1 - - us-east-1 - region-set-2: - - ap-southeast-1 - - eu-central-1 - - sa-east-1 - - us-east-1 diff --git a/docs/index.md b/docs/index.md new file mode 120000 index 00000000..32d46ee8 --- /dev/null +++ b/docs/index.md @@ -0,0 +1 @@ +../README.md \ No newline at end of file diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index ffa34f9d..00000000 --- a/docs/index.rst +++ /dev/null @@ -1,24 +0,0 @@ -.. RDK documentation master file, created by - sphinx-quickstart on Mon Sep 3 12:46:08 2018. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to RDK's documentation! -=============================== - -.. 
toctree:: - :maxdepth: 3 - :caption: Contents: - - introduction - getting_started - references - - - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/docs/introduction.rst b/docs/introduction.rst deleted file mode 100644 index 601ad9a8..00000000 --- a/docs/introduction.rst +++ /dev/null @@ -1,7 +0,0 @@ -Introduction -============ - -Rule Development Kit - Version 2 -This tool should be considered in "Open Beta". We would greatly appreciate feedback and bug reports either as github issues or emails to rdk-maintainers@amazon.com! - -The RDK is designed to support a "Compliance-as-Code" workflow that is intuitive and productive. It abstracts away much of the undifferentiated heavy lifting associated with deploying AWS Config rules backed by custom lambda functions, and provides a streamlined develop-deploy-monitor iterative process. diff --git a/docs/legacy-docs.md b/docs/legacy-docs.md new file mode 100644 index 00000000..d7bba05c --- /dev/null +++ b/docs/legacy-docs.md @@ -0,0 +1,378 @@ +# Legacy RDK Documentation + +Please note, this documentation is a carry-over from the old RTD documentation pre-mkdocs. +This will likely be removed in a subsequent release. + +## Introduction + +Rule Development Kit - Version 2 This tool should be considered in +"Open Beta". We would greatly appreciate feedback and bug reports +either as github issues or emails to ! + +The RDK is designed to support a "Compliance-as-Code" workflow that is +intuitive and productive. It abstracts away much of the undifferentiated +heavy lifting associated with deploying AWS Config rules backed by +custom lambda functions, and provides a streamlined +develop-deploy-monitor iterative process. + +## Prerequisites + +RDK uses python 3.7+. You will need to have an AWS account and +sufficient permissions to manage the Config service, and to create and +manage S3 Buckets, Roles, and Lambda Functions. An AWS IAM Policy +Document that describes the minimum necessary permissions can be found +[here](https://github.com/awslabs/aws-config-rdk/blob/master/policy/rdk-minimum-permissions.json) +on github. + +Under the hood, rdk uses boto3 to make API calls to AWS, so you can set +your credentials any way that boto3 recognizes (options 3 through 8 in +the [boto docs +here](https://boto3.readthedocs.io/en/latest/guide/configuration.html) ) +or pass them in with the command-line parameters --profile, --region, +--access-key-id, or --secret-access-key + +## Installation + +If you just want to use the RDK, go ahead and install it using pip: + + pip install rdk + +Alternately, if you want to see the code and/or contribute you can clone +the [git repo](https://github.com/awslabs/aws-config-rdk/) , and then +from the repo directory use pip to install the package. Use the '-e' +flag to generate symlinks so that any edits you make will be reflected +when you run the installed package. + +If you are going to author your Lambda functions using Java you will +need to have Java 8 and gradle installed. If you are going to author +your Lambda functions in C# you will need to have the dotnet CLI and the +.NET Core Runtime 1.08 installed. : + + pip install -e . + +To make sure the rdk is installed correctly, running the package from +the command line without any arguments should display help information. + + rdk + usage: rdk [-h] [-p PROFILE] [-k ACCESS_KEY] [-s SECRET_ACCESS_KEY] + [-r REGION] + ... 
+ rdk: error: the following arguments are required: , + +## Usage + +### Configure your env + +To use the RDK, it's recommended to create a directory that will be +your working directory. This should be committed to a source code repo, +and ideally created as a python virtualenv. In that directory, run the +`init` command to set up your AWS Config environment. + + rdk init + Running init! + Creating Config bucket config-bucket-780784666283 + Creating IAM role config-role + Waiting for IAM role to propagate + Config Service is ON + Config setup complete. + Creating Code bucket config-rule-code-bucket-780784666283ap-southeast-1 + +Running `init` subsequent times will validate your AWS Config setup and +re-create any S3 buckets or IAM resources that are needed. + +### Create Rules + +In your working directory, use the `create` command to start creating a +new custom rule. You must specify the runtime for the lambda function +that will back the Rule, and you can also specify a resource type (or +comma-separated list of types) that the Rule will evaluate or a maximum +frequency for a periodic rule. This will add a new directory for the +rule and populate it with several files, including a skeleton of your +Lambda code. + + rdk create MyRule --runtime python3.8 --resource-types AWS::EC2::Instance --input-parameters '{"desiredInstanceType":"t2.micro"}' + Running create! + Local Rule files created. + +On Windows it is necessary to escape the double-quotes when specifying +input parameters, so the --input-parameters argument +would instead look something like this: + + '{"desiredInstanceType":"t2.micro"}' + +Note that you can create rules that use EITHER resource-types OR +maximum-frequency, but not both. We have found that rules that try to be +both event-triggered as well as periodic wind up being very complicated +and so we do not recommend it as a best practice. + +### Edit Rules Locally + +Once you have created the rule, edit the python file in your rule +directory (in the above example it would be `MyRule/MyRule.py`, but may +be deeper into the rule directory tree depending on your chosen Lambda +runtime) to add whatever logic your Rule requires in the +`evaluate_compliance` function. You will have access to the CI that was +sent by Config, as well as any parameters configured for the Config +Rule. Your function should return either a simple compliance status (one +of `COMPLIANT`, `NONCOMPLIANT`, or `NOT_APPLICABLE`), or if you're +using the python or node runtimes you can return a JSON object with +multiple evaluation responses that the RDK will send back to AWS Config. +An example would look like: + + for sg in response['SecurityGroups']: + evaluations.append( + { + 'ComplianceResourceType': 'AWS::EC2::SecurityGroup', + 'ComplianceResourceId': sg['GroupId'], + 'ComplianceType': 'COMPLIANT', + 'Annotation': 'This is an important note.', + 'OrderingTimestamp': str(datetime.datetime.now()) + }) + + + return evaluations + +This is necessary for periodic rules that are not triggered by any CI +change (which means the CI that is passed in will be null), and also for +attaching annotations to your evaluation results. + +If you want to see what the JSON structure of a CI looks like for +creating your logic, you can use + + rdk sample-ci + +to output a formatted JSON document. + +### Write and Run Unit Tests + +If you are writing Config Rules using either of the Python runtimes +there will be a `_test.py` file deployed along with your +Lambda function skeleton. 
This can be used to write unit tests according
+to the standard Python unittest framework (documented here:
+https://docs.python.org/3/library/unittest.html), which can be run
+using the `test-local` rdk command:
+
+ rdk test-local MyTestRule
+ Running local test!
+ Testing MyTestRule
+ Looking for tests in /Users/mborch/Code/rdk-dev/MyTestRule
+
+ ---------------------------------------------------------------------
+
+ Ran 0 tests in 0.000s
+
+ OK
+
+
+The test file includes setup for the MagicMock library that can be used
+to stub boto3 API calls if your rule logic will involve making API calls
+to gather additional information about your AWS environment. For some
+tips on how to do this, check out this blog post:
+https://sgillies.net/2017/10/19/mock-is-magic.html
+
+### Modify Rule
+
+If you need to change the parameters of a Config rule in your working
+directory you can use the `modify` command. Any parameters you specify
+will overwrite existing values, any that you do not specify will not be
+changed.
+
+ rdk modify MyRule --runtime python3.9 --maximum-frequency TwentyFour_Hours --input-parameters '{"desiredInstanceType":"t2.micro"}'
+ Running modify!
+ Modified Rule 'MyRule'. Use the `deploy` command to push your changes to AWS.
+
+Again, on Windows the input parameters would look like:
+
+ '{\"desiredInstanceType\":\"t2.micro\"}'
+
+It is worth noting that until you actually call the `deploy` command
+your rule only exists in your working directory; none of the Rule
+commands discussed thus far actually make changes to your account.
+
+### Deploy Rule
+
+Once you have completed your compliance validation code and set your
+Rule's configuration, you can deploy the Rule to your account using the
+`deploy` command. This will zip up your code (and the other associated
+code files, if any) into a deployable package (or run a gradle build if
+you have selected the java8 runtime or run the lambda packaging step
+from the dotnet CLI if you have selected the dotnetcore1.0 runtime),
+copy that zip file to S3, and then launch or update a CloudFormation
+stack that defines your Config Rule, Lambda function, and the necessary
+permissions and IAM Roles for it to function. Since CloudFormation does
+not deeply inspect Lambda code objects in S3 to construct its changeset,
+the `deploy` command will also directly update the Lambda function for
+any subsequent deployments to make sure code changes are propagated
+correctly.
+
+ rdk deploy MyRule
+ Running deploy!
+ Zipping MyRule
+ Uploading MyRule
+ Creating CloudFormation Stack for MyRule
+ Waiting for CloudFormation stack operation to complete...
+ ...
+ Waiting for CloudFormation stack operation to complete...
+ Config deploy complete.
+
+The exact output will vary depending on Lambda runtime. You can use the
+--all flag to deploy all of the rules in your working directory.
+
+### View Logs For Deployed Rule
+
+Once the Rule has been deployed to AWS you can get the CloudWatch logs
+associated with your lambda function using the `logs` command.
+
+ rdk logs MyRule -n 5
+ 2017-11-15 22:59:33 - START RequestId: 96e7639a-ca15-11e7-95a2-b1521890638d Version: $LATEST
+ 2017-11-15 23:41:13 - REPORT RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda Duration: 0.50 ms Billed Duration: 100 ms Memory Size: 256 MB
+ Max Memory Used: 36 MB
+ 2017-11-15 23:41:13 - END RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda
+ 2017-11-15 23:41:13 - Default RDK utility class does not yet support Scheduled Notifications.
+ 2017-11-15 23:41:13 - START RequestId: 68e0304f-ca1b-11e7-b735-81ebae95acda Version: $LATEST + +You can use the `-n` and `-f` command line flags just like the UNIX +`tail` command to view a larger number of log events and to continuously +poll for new events. The latter option can be useful in conjunction with +manually initiating Config Evaluations for your deploy Config Rule to +make sure it is behaving as expected. + +## Advanced Features + +### Cross-Account Deployments + +Features have been added to the RDK to facilitate the cross-account +deployment pattern that enterprise customers have standardized on for +custom Config Rules. A cross-account architecture is one in which the +Lambda functions are deployed to a single central "Compliance" account +(which may be the same as a central "Security" account), and the +Config Rules are deployed to any number of "Satellite" accounts that +are used by other teams or departments. This gives the compliance team +confidence that their Rule logic cannot be tampered with and makes it +much easier for them to modify rule logic without having to go through a +complex deployment process to potentially hundreds of AWS accounts. The +cross-account pattern uses two advanced RDK features - functions-only +deployments and the create-rule-template command. + +**Function-Only Deployment** + +By using the -f or --functions-only flag on +the deploy command the RDK will deploy only the necessary +Lambda Functions, Lambda Execution Role, and Lambda Permissions to the +account specified by the execution credentials. It accomplishes this by +batching up all of the Lambda function CloudFormation snippets for the +selected Rule(s) into a single dynamically generated template and deploy +that CloudFormation template. One consequence of this is that subsequent +deployments that specify a different set of Rules for the same stack +name will update that CloudFormation stack, and any Rules that were +included in the first deployment but not in the second will be removed. +You can use the --stack-name parameter to override the +default CloudFormation stack name if you need to manage different +subsets of your Lambda Functions independently. The intended usage is to +deploy the functions for all of the Config rules in the +Security/Compliance account, which can be done simply by using rdk +deploy -f --all from your working directory. + +**`create-rule-template` command** + +This command generates a CloudFormation template that defines the AWS +Config rules themselves, along with the Config Role, Config data bucket, +Configuration Recorder, and Delivery channel necessary for the Config +rules to work in a satellite account. You must specify the file name for +the generated template using the --output-file or +o command line flags. The generated template takes a +single parameter of the AccountID of the central compliance account that +contains the Lambda functions that will back your custom Config Rules. +The generated template can be deployed in the desired satellite accounts +through any of the means that you can deploy any other CloudFormation +template, including the console, the CLI, as a CodePipeline task, or +using StackSets. The create-rule-template command takes +all of the standard arguments for selecting Rules to include in the +generated template, including lists of individual Rule names, an +--all flag, or using the RuleSets feature described +below. + + rdk create-rule-template -o remote-rule-template.json --all + Generating CloudFormation template! 
+ CloudFormation template written to remote-rule-template.json + +### RuleSets + +New as of version 0.3.11, it is possible to add RuleSet tags to rules +that can be used to deploy and test groups of rules together. Rules can +belong to multiple RuleSets, and RuleSet membership is stored only in +the parameters.json metadata. The deploy, +create-rule-template, and test-local +commands are RuleSet-aware such that a RuleSet can be passed in as the +target instead of --all or a specific named Rule. + +A comma-delimited list of RuleSets can be added to a Rule when you +create it (using the --rulesets flag), as part of a +modify command, or using new ruleset +subcommands to add or remove individual rules from a RuleSet. + +Running rdk rulesets list will display a list of the +RuleSets currently defined across all of the Rules in the working +directory + + rdk rulesets list + RuleSets: AnotherRuleSet MyNewSet + +Naming a specific RuleSet will list all of the Rules that are part of +that RuleSet. + + rdk rulesets list AnotherRuleSet + Rules in AnotherRuleSet : RSTest + +Rules can be added to or removed from RuleSets using the +add and remove subcommands: + + rdk rulesets add MyNewSet RSTest + RSTest added to RuleSet MyNewSet + + rdk rulesets remove AnotherRuleSet RSTest + RSTest removed from RuleSet AnotherRuleSet + +RuleSets are a convenient way to maintain a single repository of Config +Rules that may need to have subsets of them deployed to different +environments. For example your development environment may contain some +of the Rules that you run in Production but not all of them; RuleSets +gives you a way to identify and selectively deploy the appropriate Rules +to each environment. + +### Region Sets + +rdk init, rdk deploy, and rdk +undeploy subcommands now support running across multiple +regions in parallel using region sets defined in a yaml file. + +To run a subcommand with a region set, pass in the region set yaml file +and the specific region set to run through. + + rdk -f regions.yaml --region-set region-set-1 undeploy CUSTOM_RULE + Deleting rules in the following regions: ['sa-east-1', 'us-east-1']. + Delete specified Rules and Lambda Functions from your AWS Account? (y/N): y + [sa-east-1] Running un-deploy! + [us-east-1] Running un-deploy! + [us-east-1] Rule removal initiated. Waiting for Stack Deletion to complete. + [sa-east-1] Rule removal initiated. Waiting for Stack Deletion to complete. + [us-east-1] CloudFormation stack operation complete. + [us-east-1] Rule removal complete, but local files have been preserved. + [us-east-1] To re-deploy, use the 'deploy' command. + [sa-east-1] CloudFormation stack operation complete. + [sa-east-1] Rule removal complete, but local files have been preserved. + [sa-east-1] To re-deploy, use the 'deploy' command. + +Example region set file: + + default: + - us-west-1 + - us-west-2 + region-set-1: + - sa-east-1 + - us-east-1 + region-set-2: + - ap-southeast-1 + - eu-central-1 + - sa-east-1 + - us-east-1 diff --git a/docs/make.bat b/docs/make.bat deleted file mode 100644 index add241e6..00000000 --- a/docs/make.bat +++ /dev/null @@ -1,36 +0,0 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set SOURCEDIR=. -set BUILDDIR=_build -set SPHINXPROJ=RDK - -if "%1" == "" goto help - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% - -:end -popd diff --git a/docs/reference/clean.rst b/docs/reference/clean.rst deleted file mode 100644 index 07804bad..00000000 --- a/docs/reference/clean.rst +++ /dev/null @@ -1,10 +0,0 @@ -Clean ------ - -.. argparse:: - :module: rdk - :func: get_clean_parser - :prog: rdk clean - :nodescription: - - The ``clean`` command is the inverse of the ``init`` command, and can be used to completely remove Config resources from an account, including the Configuration Recorder, Delivery Channel, S3 buckets, Roles, and Permissions. This is useful for testing account provisioning automation and for running automated tests in a clean environment. diff --git a/docs/reference/create-rule-template.rst b/docs/reference/create-rule-template.rst deleted file mode 100644 index 1d4903d9..00000000 --- a/docs/reference/create-rule-template.rst +++ /dev/null @@ -1,22 +0,0 @@ -Create-Rule-Template --------------------- - -.. argparse:: - :module: rdk - :func: get_create_rule_template_parser - :prog: rdk create-rule-template - :nodescription: - - Generates and saves to a file a single CloudFormation template that can be used to deploy the specified Rule(s) into any account. This feature has two primary uses: - - - Multi-account Config setup in which the Lambda Functions for custom Rules are deployed into a centralized "security" or "compliance" account and the Config Rules themselves are deployed into "application" or "satellite" accounts. - - Combine many Config Rules into a single CloudFormation template for easier atomic deployment and management. - - The generated CloudFormation template includes a Parameter for the AccountID that contains the Lambda functions that provide the compliance logic for the Rules, and also exposes all of the Config Rule input parameters as CloudFormation stack parameters. - - By default the generated CloudFormation template will set up Config as per the settings used by the RDK ``init`` command, but those resources can be omitted using the ``--rules-only`` flag. - - The ``--config-role-arn`` flag can be used for assigning existing config role to the created Configuration Recorder. - The ``-t | --tag-config-rules-script `` can now be used for output the script generated for create tags for each config rule. - - As of version 0.6, RDK supports Config remediation. Note that in order to use SSM documents for remediation you must supply all of the necessary document parameters. These can be found in the SSM document listing on the AWS console, but RDK will *not* validate at rule creation that you have all of the necessary parameters supplied. \ No newline at end of file diff --git a/docs/reference/create.rst b/docs/reference/create.rst deleted file mode 100644 index 4a5d7589..00000000 --- a/docs/reference/create.rst +++ /dev/null @@ -1,10 +0,0 @@ -Create ------- - -.. argparse:: - :module: rdk - :func: get_create_parser - :prog: rdk create - - As of version 0.6, RDK supports Config remediation. Note that in order to use SSM documents for remediation you must supply all of the necessary document parameters. 
These can be found in the SSM document listing on the AWS console, but RDK will *not* validate at rule creation that you have all of the necessary parameters supplied. -
diff --git a/docs/reference/deploy.rst b/docs/reference/deploy.rst deleted file mode 100644 index 512c5918..00000000 --- a/docs/reference/deploy.rst +++ /dev/null @@ -1,25 +0,0 @@
-Deploy ------- -
-.. argparse:: - :module: rdk - :func: get_deploy_parser - :prog: rdk deploy - :nodescription: -
- This command will deploy the specified Rule(s) to the Account and Region determined by the credentials being used to execute the command, and the value of the AWS_DEFAULT_REGION environment variable, unless those credentials or region are overridden using the common flags. -
- Once deployed, RDK will _not_ explicitly start a Rule evaluation. Depending on the changes being made to your Config Rule setup AWS Config may re-evaluate the deployed Rules automatically, or you can run an evaluation using the AWS configservice CLI. -
- The ``--functions-only`` flag can be used as part of a multi-account deployment strategy to push _only_ the Lambda functions (and necessary Roles and Permissions) to the target account. This is intended to be used in conjunction with the ``create-rule-template`` command in order to separate the compliance logic from the evaluated accounts. For an example of how this looks in practice, check out the `AWS Compliance-as-Code Engine `_.
- The ``--rdklib-layer-arn`` flag can be used for attaching Lambda Layer ARN that contains the desired rdklib. Note that Lambda Layers are region-specific.
- The ``--lambda-role-arn`` flag can be used for assigning existing iam role to all Lambda functions created for Custom Config Rules.
- The ``--lambda-layers`` flag can be used for attaching a comma-separated list of Lambda Layer ARNs to deploy with your Lambda function(s).
- The ``--lambda-subnets`` flag can be used for attaching a comma-separated list of Subnets to deploy your Lambda function(s).
- The ``--lambda-security-groups`` flag can be used for attaching a comma-separated list of Security Groups to deploy with your Lambda function(s).
- The ``--custom-code-bucket`` flag can be used for providing the custom code S3 bucket name, which is not created with rdk init, for generated cloudformation template storage.
- The ``--boundary-policy-arn`` flag can be used for attaching boundary Policy ARN that will be added to rdkLambdaRole.
- The ``--lambda-timeout`` flag can be used for specifying the timeout associated to the lambda function - -
- Note: Behind the scenes the ``--functions-only`` flag generates a CloudFormation template and runs a "create" or "update" on the targeted AWS Account and Region. If subsequent calls to ``deploy`` with the ``--functions-only`` flag are made with the same stack name (either the default or otherwise) but with *different Config rules targeted*, any Rules deployed in previous ``deploy``s but not included in the latest ``deploy`` will be removed. After a functions-only ``deploy`` _only_ the Rules specifically targeted by that command (either through Rulesets or an explicit list supplied on the command line) will be deployed in the environment, all others will be removed.
diff --git a/docs/reference/export.rst b/docs/reference/export.rst deleted file mode 100644 index f71a3bd2..00000000 --- a/docs/reference/export.rst +++ /dev/null @@ -1,22 +0,0 @@ -Export ------- - -..
argparse:: - :module: rdk - :func: get_export_parser - :prog: rdk export - :nodescription: - - This command will export the specified Rule(s) to the terraform file, it supports the terraform versions 0.11 and 0.12. - - - The ``--format`` flag can be used to specify export format, currently it supports only terraform. - The ``--version`` flag can be used to specify the terraform version. - The ``--rdklib-layer-arn`` flag can be used for attaching Lambda Layer ARN that contains the desired rdklib. Note that Lambda Layers are region-specific. - The ``--lambda-role-arn`` flag can be used for assigning existing iam role to all Lambda functions created for Custom Config Rules. - The ``--lambda-layers`` flag can be used for attaching a comma-separated list of Lambda Layer ARNs to deploy with your Lambda function(s). - The ``--lambda-subnets`` flag can be used for attaching a comma-separated list of Subnets to deploy your Lambda function(s). - The ``--lambda-security-groups`` flag can be used for attaching a comma-separated list of Security Groups to deploy with your Lambda function(s). - The ``--lambda-timeout`` flag can be used for specifying the timeout associated to the lambda function - - \ No newline at end of file diff --git a/docs/reference/init.rst b/docs/reference/init.rst deleted file mode 100644 index fe2dc9af..00000000 --- a/docs/reference/init.rst +++ /dev/null @@ -1,28 +0,0 @@ -Init ----- - -.. argparse:: - :module: rdk - :func: get_init_parser - :prog: rdk init - :nodescription: - - Sets up the AWS Config Service in an AWS Account. This includes: - - - Config Configuration Recorder - - Config Delivery Channel - - IAM Role for Delivery Channel - - S3 Bucket for Configuration Snapshots - - S3 Bucket for Lambda Code - - Additionally, ``init`` will make sure that the Configuration Recorder is on and functioning, that the Delivery Channel has the appropriate Role attached, and that the Delivery Channel Role has the proper permissions. - - Note: Even without Config Rules running the Configuration Recorder is still capturing Configuration Item snapshots and storing them in S3, so running ``init`` will incur AWS charges! - - Also Note: AWS Config is a regional service, so running ``init`` will only set up Config in the region currently specified in your AWS_DEFAULT_REGION environment variable or in the ``--region`` flag. - - Advanced Options: - - - ``--config-bucket-exists-in-another-account``: [optional] If the bucket being used by a Config Delivery Channel exists in another account, it is possible to skip the check that the bucket exists. This is useful when using ``init`` to initialize AWS Config in an account which already has a delivery channel setup with a central bucket. Currently, the rdk lists out all the buckets within the account your are running ``init`` from, to check if the provided bucket name exists, if it doesn't then it will create it. This presents an issue when a Config Delivery Channel has been configured to push configuration recordings to a central bucket. The bucket will never be found as it doesn't exist in the same account, but cannot be created as bucket names have to be globally unique. 
- - ``--skip-code-bucket-creation``: [optional] If you want to use custom code bucket for rdk, enable this and use flag ``--custom-code-bucket`` to ``rdk deploy`` - - ``control-tower``: [optional] If your account is part of an AWS Control Tower setup --control-tower will skip the setup of configuration_recorder and delivery_channel \ No newline at end of file diff --git a/docs/reference/logs.rst b/docs/reference/logs.rst deleted file mode 100644 index 2b73c9af..00000000 --- a/docs/reference/logs.rst +++ /dev/null @@ -1,12 +0,0 @@ -Logs ----- - -.. argparse:: - :module: rdk - :func: get_logs_parser - :prog: rdk logs - :nodescription: - - The ``logs`` command provides a shortcut to accessing the CloudWatch Logs output from the Lambda Functions that back your custom Config Rules. Logs are displayed in chronological order going back the number of log entries specified by the ``--number`` flag (default 3). It supports a ``--follow`` flag similar to the UNIX command ``tail`` so that you can choose to continually poll CloudWatch to deliver new log items as they are delivered by your Lambda function. - - In addition to any output that your function emits via ``print()`` or ``console.log()`` commands, Lambda will also record log lines for the start and stop of each Lambda invocation, including the runtime and memory usage. diff --git a/docs/reference/modify.rst b/docs/reference/modify.rst deleted file mode 100644 index fc5f171b..00000000 --- a/docs/reference/modify.rst +++ /dev/null @@ -1,10 +0,0 @@ -Modify ------- - -.. argparse:: - :module: rdk - :func: get_modify_parser - :prog: rdk modify - :nodescription: - - Used to modify the local metadata for Config Rules created by the RDK. This command takes the same arguments as the ``create`` command (all of them optional), and overwrites the Rule metadata for any flag specified. Changes made using ``modify`` are not automatically pushed out to your AWS Account, and must be deployed as usual using the ``deploy`` command. diff --git a/docs/reference/rulesets.rst b/docs/reference/rulesets.rst deleted file mode 100644 index f09cab7b..00000000 --- a/docs/reference/rulesets.rst +++ /dev/null @@ -1,16 +0,0 @@ -Rulesets --------- - -.. argparse:: - :module: rdk - :func: get_rulesets_parser - :prog: rdk rulesets - :nodescription: - - Rulesets provide a mechanism to tag individual Config Rules into groups that can be acted on as a unit. Ruleset tags are single keywords, and the commands ``deploy``, ``create-rule-template``, and ``undeploy`` can all expand Ruleset parameters and operate on the resulting list of Rules. - - The most common use-case for Rulesets is to define standardized Account metadata or data classifications, and then tag individual Rules to all of the appropriate metadata tags or classification levels. - - Example: If you have Account classifications of "Public", "Private", and "Restricted" you can tag all of your Rules as "Restricted", and a subset of them that deal with private network security as "Private". Then when you need to deploy controls to a new "Private" account you can simply use ``rdk create-rule-template --rulesets Private`` to generate a CloudFormation template that includes all of the Rules necessary for your "Private" classification, but omit the Rules that are only necessary for "Restricted" accounts. Additionally, as your compliance requirements change and you add Config Rules you can tag them as appropriate, re-generate your CloudFormation templates, and re-deploy to make sure your Accounts are all up-to-date. 
- - You may also choose to classify accounts using binary attributes ("Prod" vs. "Non-Prod" or "PCI" vs. "Non-PCI"), and then generate account-specific CloudFormation templates using the Account metadata to ensure that the appropriate controls are deployed. diff --git a/docs/reference/sample-ci.rst b/docs/reference/sample-ci.rst deleted file mode 100644 index 23567094..00000000 --- a/docs/reference/sample-ci.rst +++ /dev/null @@ -1,18 +0,0 @@ -Sample-CI ---------- - -.. argparse:: - :module: rdk - :func: get_sample_ci_parser - :prog: rdk sample-ci - :nodescription: - - This utility command outputs a sample Configuration Item for the specified resource type. This can be useful when writing new custom Config Rules to help developers know what the CI structure and plausible values for the resource type are. - - Note that you can construct Config Evaluations for any resource type that is supported by CloudFormation, however you can not create change-triggered Config Rules for resource types not explicitly supported by Config, and some of the console functionality in AWS Config may be limited. - - `CFN-supported resources `_ - `Config-supported resources `_ - - ci_type : @replace - One of the supported Config-supported resource types. diff --git a/docs/reference/test-local.rst b/docs/reference/test-local.rst deleted file mode 100644 index 63476a9e..00000000 --- a/docs/reference/test-local.rst +++ /dev/null @@ -1,10 +0,0 @@ -Test-Local ----------- - -.. argparse:: - :module: rdk - :func: get_test_local_parser - :prog: rdk test-local - :nodescription: - - Shorthand command for running the unit tests defined for Config Rules that use a Python runtime. When a Python 3.7+ Rule is created using the ``create`` command a unit test template is created in the Rule directory. This test boilerplate includes minimal tests, as well as a framework for using the ``unittest.mock`` library for stubbing out Boto3 calls. This allows more sophisticated test cases to be written for Periodic rules that need to make API calls to gather information about the environment. diff --git a/docs/reference/undeploy.rst b/docs/reference/undeploy.rst deleted file mode 100644 index 4207d0e6..00000000 --- a/docs/reference/undeploy.rst +++ /dev/null @@ -1,12 +0,0 @@ -Undeploy --------- - -.. argparse:: - :module: rdk - :func: get_undeploy_parser - :prog: rdk undeploy - :nodescription: - - The inverse of ``deploy``, this command is used to remove a Config Rule and its Lambda Function from the targeted account. - - This is intended to be used primarily for clean-up for testing deployment automation (perhaps from a CI/CD pipeline) to ensure that it works from an empty account, or to clean up a test account during development. See also the `clean <./clean.html>`_ command if you want to more thoroughly scrub Config from your account. diff --git a/docs/references.rst b/docs/references.rst deleted file mode 100644 index 5b526844..00000000 --- a/docs/references.rst +++ /dev/null @@ -1,21 +0,0 @@ -Command Reference -================= - -.. argparse:: - :module: rdk - :func: get_command_parser - :prog: rdk - :nodescription: - - The RDK has some options that can be used to override the default behavior (mostly relating to the identity and credentials used by the tool) that are common to all of the sub-commands. - -Sub-Commands ------------- - -.. 
toctree:: - :maxdepth: 3 - :caption: Command Reference: - :titlesonly: - :glob: - - reference/* diff --git a/docs/requirements.txt b/docs/requirements.txt index 34fcb7dd..c1560e74 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,6 +1,299 @@ -Sphinx==1.7.8 -sphinx-argparse==0.2.5 -sphinx-rtd-theme==0.4.3 -sphinxcontrib-websupport==1.1.0 -PyYAML==5.4.1 -jinja2<3.1.0 +cached-property==1.5.2 ; python_full_version >= "3.7.2" and python_version < "3.8" \ + --hash=sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130 \ + --hash=sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0 +certifi==2022.12.7 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \ + --hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18 +charset-normalizer==2.1.1 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ + --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f +click==8.1.3 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e \ + --hash=sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48 +colorama==0.4.6 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ + --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 +ghp-import==2.1.0 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619 \ + --hash=sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343 +griffe==0.28.2 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:a471498b0b9505c721ea0e652fd77c97df1aeb56c4eb8c93d24bb1140da4216d \ + --hash=sha256:bde3a3dfa301a4b113c7fac3b2be45e5723bc50cda4c9cfe13f43c447c9aa5d1 +idna==3.4 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ + --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 +importlib-metadata==4.13.0 ; python_full_version >= "3.7.2" and python_version < "3.10" \ + --hash=sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116 \ + --hash=sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d +jinja2==3.1.2 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ + --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 +markdown-include==0.8.1 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:1d0623e0fc2757c38d35df53752768356162284259d259c486b4ab6285cdbbe3 \ + --hash=sha256:32f0635b9cfef46997b307e2430022852529f7a5b87c0075c504283e7cc7db53 +markdown==3.3.7 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874 \ + --hash=sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621 +markupsafe==2.1.2 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + 
--hash=sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed \ + --hash=sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc \ + --hash=sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2 \ + --hash=sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460 \ + --hash=sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7 \ + --hash=sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0 \ + --hash=sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1 \ + --hash=sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa \ + --hash=sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03 \ + --hash=sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323 \ + --hash=sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65 \ + --hash=sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013 \ + --hash=sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036 \ + --hash=sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f \ + --hash=sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4 \ + --hash=sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419 \ + --hash=sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2 \ + --hash=sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619 \ + --hash=sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a \ + --hash=sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a \ + --hash=sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd \ + --hash=sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7 \ + --hash=sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666 \ + --hash=sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65 \ + --hash=sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859 \ + --hash=sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625 \ + --hash=sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff \ + --hash=sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156 \ + --hash=sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd \ + --hash=sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba \ + --hash=sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f \ + --hash=sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1 \ + --hash=sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094 \ + --hash=sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a \ + --hash=sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513 \ + --hash=sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed \ + --hash=sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d \ + --hash=sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3 \ + --hash=sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147 \ + --hash=sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c \ + --hash=sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603 \ + --hash=sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601 \ + 
--hash=sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a \ + --hash=sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1 \ + --hash=sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d \ + --hash=sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3 \ + --hash=sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54 \ + --hash=sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2 \ + --hash=sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6 \ + --hash=sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58 +mergedeep==1.3.4 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8 \ + --hash=sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307 +mkdocs-autorefs==0.4.1 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:70748a7bd025f9ecd6d6feeba8ba63f8e891a1af55f48e366d6d6e78493aba84 \ + --hash=sha256:a2248a9501b29dc0cc8ba4c09f4f47ff121945f6ce33d760f145d6f89d313f5b +mkdocs-material-extensions==1.1.1 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:9c003da71e2cc2493d910237448c672e00cefc800d3d6ae93d2fc69979e3bd93 \ + --hash=sha256:e41d9f38e4798b6617ad98ca8f7f1157b1e4385ac1459ca1e4ea219b556df945 +mkdocs-material==9.1.14 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:1ae74cc5464ef2f64574d4884512efed7f4db386fb9bc6af20fd427d7a702f49 \ + --hash=sha256:b56a9f955ed32d38333715cbbf68ce38f683bf38610c65094fa4ef2db9f08bcd +mkdocs==1.4.3 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:5955093bbd4dd2e9403c5afaf57324ad8b04f16886512a3ee6ef828956481c57 \ + --hash=sha256:6ee46d309bda331aac915cd24aab882c179a933bd9e77b80ce7d2eaaa3f689dd +mkdocstrings-python==1.0.0 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:b89d849df990204f909d5452548b6936a185f912da06208a93909bebe25d6e67 \ + --hash=sha256:c59d67009a7a85172f4da990d8523e95606b6a1ff93a22a2351ad3b5f8cafed1 +mkdocstrings==0.21.2 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:304e56a2e90595708a38a13a278e538a67ad82052dd5c8b71f77a604a4f3d911 \ + --hash=sha256:949ef8da92df9d692ca07be50616459a6b536083a25520fd54b00e8814ce019b +packaging==21.3 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb \ + --hash=sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522 +pygments==2.15.1 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c \ + --hash=sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1 +pymdown-extensions==10.0.1 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:ae66d84013c5d027ce055693e09a4628b67e9dec5bce05727e45b0918e36f274 \ + --hash=sha256:b44e1093a43b8a975eae17b03c3a77aad4681b3b56fce60ce746dbef1944c8cb +pyparsing==3.0.9 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb \ + --hash=sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc +python-dateutil==2.8.2 ; python_full_version >= "3.7.2" and python_full_version < 
"4.0.0" \ + --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ + --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 +pyyaml-env-tag==0.1 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb \ + --hash=sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069 +pyyaml==6.0 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf \ + --hash=sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293 \ + --hash=sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b \ + --hash=sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57 \ + --hash=sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b \ + --hash=sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4 \ + --hash=sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07 \ + --hash=sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba \ + --hash=sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9 \ + --hash=sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287 \ + --hash=sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513 \ + --hash=sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0 \ + --hash=sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782 \ + --hash=sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0 \ + --hash=sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92 \ + --hash=sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f \ + --hash=sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2 \ + --hash=sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc \ + --hash=sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1 \ + --hash=sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c \ + --hash=sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86 \ + --hash=sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4 \ + --hash=sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c \ + --hash=sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34 \ + --hash=sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b \ + --hash=sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d \ + --hash=sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c \ + --hash=sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb \ + --hash=sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7 \ + --hash=sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737 \ + --hash=sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3 \ + --hash=sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d \ + --hash=sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358 \ + --hash=sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53 \ + --hash=sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78 \ + --hash=sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803 \ + 
--hash=sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a \ + --hash=sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f \ + --hash=sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174 \ + --hash=sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5 +regex==2023.5.5 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:02f4541550459c08fdd6f97aa4e24c6f1932eec780d58a2faa2068253df7d6ff \ + --hash=sha256:0a69cf0c00c4d4a929c6c7717fd918414cab0d6132a49a6d8fc3ded1988ed2ea \ + --hash=sha256:0bbd5dcb19603ab8d2781fac60114fb89aee8494f4505ae7ad141a3314abb1f9 \ + --hash=sha256:10250a093741ec7bf74bcd2039e697f519b028518f605ff2aa7ac1e9c9f97423 \ + --hash=sha256:10374c84ee58c44575b667310d5bbfa89fb2e64e52349720a0182c0017512f6c \ + --hash=sha256:1189fbbb21e2c117fda5303653b61905aeeeea23de4a94d400b0487eb16d2d60 \ + --hash=sha256:1307aa4daa1cbb23823d8238e1f61292fd07e4e5d8d38a6efff00b67a7cdb764 \ + --hash=sha256:144b5b017646b5a9392a5554a1e5db0000ae637be4971c9747566775fc96e1b2 \ + --hash=sha256:171c52e320fe29260da550d81c6b99f6f8402450dc7777ef5ced2e848f3b6f8f \ + --hash=sha256:18196c16a584619c7c1d843497c069955d7629ad4a3fdee240eb347f4a2c9dbe \ + --hash=sha256:18f05d14f14a812fe9723f13afafefe6b74ca042d99f8884e62dbd34dcccf3e2 \ + --hash=sha256:1ecf3dcff71f0c0fe3e555201cbe749fa66aae8d18f80d2cc4de8e66df37390a \ + --hash=sha256:21e90a288e6ba4bf44c25c6a946cb9b0f00b73044d74308b5e0afd190338297c \ + --hash=sha256:23d86ad2121b3c4fc78c58f95e19173790e22ac05996df69b84e12da5816cb17 \ + --hash=sha256:256f7f4c6ba145f62f7a441a003c94b8b1af78cee2cccacfc1e835f93bc09426 \ + --hash=sha256:290fd35219486dfbc00b0de72f455ecdd63e59b528991a6aec9fdfc0ce85672e \ + --hash=sha256:2e9c4f778514a560a9c9aa8e5538bee759b55f6c1dcd35613ad72523fd9175b8 \ + --hash=sha256:338994d3d4ca4cf12f09822e025731a5bdd3a37aaa571fa52659e85ca793fb67 \ + --hash=sha256:33d430a23b661629661f1fe8395be2004006bc792bb9fc7c53911d661b69dd7e \ + --hash=sha256:385992d5ecf1a93cb85adff2f73e0402dd9ac29b71b7006d342cc920816e6f32 \ + --hash=sha256:3d45864693351c15531f7e76f545ec35000d50848daa833cead96edae1665559 \ + --hash=sha256:40005cbd383438aecf715a7b47fe1e3dcbc889a36461ed416bdec07e0ef1db66 \ + --hash=sha256:4035d6945cb961c90c3e1c1ca2feb526175bcfed44dfb1cc77db4fdced060d3e \ + --hash=sha256:445d6f4fc3bd9fc2bf0416164454f90acab8858cd5a041403d7a11e3356980e8 \ + --hash=sha256:48c9ec56579d4ba1c88f42302194b8ae2350265cb60c64b7b9a88dcb7fbde309 \ + --hash=sha256:4a5059bd585e9e9504ef9c07e4bc15b0a621ba20504388875d66b8b30a5c4d18 \ + --hash=sha256:4a6e4b0e0531223f53bad07ddf733af490ba2b8367f62342b92b39b29f72735a \ + --hash=sha256:4b870b6f632fc74941cadc2a0f3064ed8409e6f8ee226cdfd2a85ae50473aa94 \ + --hash=sha256:50fd2d9b36938d4dcecbd684777dd12a407add4f9f934f235c66372e630772b0 \ + --hash=sha256:53e22e4460f0245b468ee645156a4f84d0fc35a12d9ba79bd7d79bdcd2f9629d \ + --hash=sha256:586a011f77f8a2da4b888774174cd266e69e917a67ba072c7fc0e91878178a80 \ + --hash=sha256:59597cd6315d3439ed4b074febe84a439c33928dd34396941b4d377692eca810 \ + --hash=sha256:59e4b729eae1a0919f9e4c0fc635fbcc9db59c74ad98d684f4877be3d2607dd6 \ + --hash=sha256:5a0f874ee8c0bc820e649c900243c6d1e6dc435b81da1492046716f14f1a2a96 \ + --hash=sha256:5ac2b7d341dc1bd102be849d6dd33b09701223a851105b2754339e390be0627a \ + --hash=sha256:5e3f4468b8c6fd2fd33c218bbd0a1559e6a6fcf185af8bb0cc43f3b5bfb7d636 \ + --hash=sha256:6164d4e2a82f9ebd7752a06bd6c504791bedc6418c0196cd0a23afb7f3e12b2d \ + 
--hash=sha256:6893544e06bae009916a5658ce7207e26ed17385149f35a3125f5259951f1bbe \ + --hash=sha256:690a17db524ee6ac4a27efc5406530dd90e7a7a69d8360235323d0e5dafb8f5b \ + --hash=sha256:6b8d0c153f07a953636b9cdb3011b733cadd4178123ef728ccc4d5969e67f3c2 \ + --hash=sha256:72a28979cc667e5f82ef433db009184e7ac277844eea0f7f4d254b789517941d \ + --hash=sha256:72aa4746993a28c841e05889f3f1b1e5d14df8d3daa157d6001a34c98102b393 \ + --hash=sha256:732176f5427e72fa2325b05c58ad0b45af341c459910d766f814b0584ac1f9ac \ + --hash=sha256:7918a1b83dd70dc04ab5ed24c78ae833ae8ea228cef84e08597c408286edc926 \ + --hash=sha256:7923470d6056a9590247ff729c05e8e0f06bbd4efa6569c916943cb2d9b68b91 \ + --hash=sha256:7d76a8a1fc9da08296462a18f16620ba73bcbf5909e42383b253ef34d9d5141e \ + --hash=sha256:811040d7f3dd9c55eb0d8b00b5dcb7fd9ae1761c454f444fd9f37fe5ec57143a \ + --hash=sha256:821a88b878b6589c5068f4cc2cfeb2c64e343a196bc9d7ac68ea8c2a776acd46 \ + --hash=sha256:84397d3f750d153ebd7f958efaa92b45fea170200e2df5e0e1fd4d85b7e3f58a \ + --hash=sha256:844671c9c1150fcdac46d43198364034b961bd520f2c4fdaabfc7c7d7138a2dd \ + --hash=sha256:890a09cb0a62198bff92eda98b2b507305dd3abf974778bae3287f98b48907d3 \ + --hash=sha256:8f08276466fedb9e36e5193a96cb944928301152879ec20c2d723d1031cd4ddd \ + --hash=sha256:8f5e06df94fff8c4c85f98c6487f6636848e1dc85ce17ab7d1931df4a081f657 \ + --hash=sha256:921473a93bcea4d00295799ab929522fc650e85c6b9f27ae1e6bb32a790ea7d3 \ + --hash=sha256:941b3f1b2392f0bcd6abf1bc7a322787d6db4e7457be6d1ffd3a693426a755f2 \ + --hash=sha256:9b320677521aabf666cdd6e99baee4fb5ac3996349c3b7f8e7c4eee1c00dfe3a \ + --hash=sha256:9c3efee9bb53cbe7b285760c81f28ac80dc15fa48b5fe7e58b52752e642553f1 \ + --hash=sha256:9fda3e50abad8d0f48df621cf75adc73c63f7243cbe0e3b2171392b445401550 \ + --hash=sha256:a4c5da39bca4f7979eefcbb36efea04471cd68db2d38fcbb4ee2c6d440699833 \ + --hash=sha256:a56c18f21ac98209da9c54ae3ebb3b6f6e772038681d6cb43b8d53da3b09ee81 \ + --hash=sha256:a623564d810e7a953ff1357f7799c14bc9beeab699aacc8b7ab7822da1e952b8 \ + --hash=sha256:a8906669b03c63266b6a7693d1f487b02647beb12adea20f8840c1a087e2dfb5 \ + --hash=sha256:a99757ad7fe5c8a2bb44829fc57ced11253e10f462233c1255fe03888e06bc19 \ + --hash=sha256:aa7d032c1d84726aa9edeb6accf079b4caa87151ca9fabacef31fa028186c66d \ + --hash=sha256:aad5524c2aedaf9aa14ef1bc9327f8abd915699dea457d339bebbe2f0d218f86 \ + --hash=sha256:afb1c70ec1e594a547f38ad6bf5e3d60304ce7539e677c1429eebab115bce56e \ + --hash=sha256:b6365703e8cf1644b82104cdd05270d1a9f043119a168d66c55684b1b557d008 \ + --hash=sha256:b8b942d8b3ce765dbc3b1dad0a944712a89b5de290ce8f72681e22b3c55f3cc8 \ + --hash=sha256:ba73a14e9c8f9ac409863543cde3290dba39098fc261f717dc337ea72d3ebad2 \ + --hash=sha256:bd7b68fd2e79d59d86dcbc1ccd6e2ca09c505343445daaa4e07f43c8a9cc34da \ + --hash=sha256:bd966475e963122ee0a7118ec9024388c602d12ac72860f6eea119a3928be053 \ + --hash=sha256:c2ce65bdeaf0a386bb3b533a28de3994e8e13b464ac15e1e67e4603dd88787fa \ + --hash=sha256:c64d5abe91a3dfe5ff250c6bb267ef00dbc01501518225b45a5f9def458f31fb \ + --hash=sha256:c8c143a65ce3ca42e54d8e6fcaf465b6b672ed1c6c90022794a802fb93105d22 \ + --hash=sha256:cd46f30e758629c3ee91713529cfbe107ac50d27110fdcc326a42ce2acf4dafc \ + --hash=sha256:ced02e3bd55e16e89c08bbc8128cff0884d96e7f7a5633d3dc366b6d95fcd1d6 \ + --hash=sha256:cf123225945aa58b3057d0fba67e8061c62d14cc8a4202630f8057df70189051 \ + --hash=sha256:d19e57f888b00cd04fc38f5e18d0efbd91ccba2d45039453ab2236e6eec48d4d \ + --hash=sha256:d1cbe6b5be3b9b698d8cc4ee4dee7e017ad655e83361cd0ea8e653d65e469468 \ + 
--hash=sha256:db09e6c18977a33fea26fe67b7a842f706c67cf8bda1450974d0ae0dd63570df \ + --hash=sha256:de2f780c3242ea114dd01f84848655356af4dd561501896c751d7b885ea6d3a1 \ + --hash=sha256:e2205a81f815b5bb17e46e74cc946c575b484e5f0acfcb805fb252d67e22938d \ + --hash=sha256:e645c757183ee0e13f0bbe56508598e2d9cd42b8abc6c0599d53b0d0b8dd1479 \ + --hash=sha256:f2910502f718828cecc8beff004917dcf577fc5f8f5dd40ffb1ea7612124547b \ + --hash=sha256:f764e4dfafa288e2eba21231f455d209f4709436baeebb05bdecfb5d8ddc3d35 \ + --hash=sha256:f83fe9e10f9d0b6cf580564d4d23845b9d692e4c91bd8be57733958e4c602956 \ + --hash=sha256:fb2b495dd94b02de8215625948132cc2ea360ae84fe6634cd19b6567709c8ae2 \ + --hash=sha256:fee0016cc35a8a91e8cc9312ab26a6fe638d484131a7afa79e1ce6165328a135 +requests==2.31.0 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ + --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 +six==1.16.0 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 +typing-extensions==4.4.0 ; python_full_version >= "3.7.2" and python_version < "3.10" \ + --hash=sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa \ + --hash=sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e +urllib3==1.26.13 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc \ + --hash=sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8 +watchdog==3.0.0 ; python_full_version >= "3.7.2" and python_full_version < "4.0.0" \ + --hash=sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a \ + --hash=sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100 \ + --hash=sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8 \ + --hash=sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc \ + --hash=sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae \ + --hash=sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41 \ + --hash=sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0 \ + --hash=sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f \ + --hash=sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c \ + --hash=sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9 \ + --hash=sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3 \ + --hash=sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709 \ + --hash=sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83 \ + --hash=sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759 \ + --hash=sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9 \ + --hash=sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3 \ + --hash=sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7 \ + --hash=sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f \ + --hash=sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346 \ + --hash=sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674 \ + 
--hash=sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397 \ + --hash=sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96 \ + --hash=sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d \ + --hash=sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a \ + --hash=sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64 \ + --hash=sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44 \ + --hash=sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33 +zipp==3.11.0 ; python_full_version >= "3.7.2" and python_version < "3.10" \ + --hash=sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa \ + --hash=sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766 diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 00000000..89a345e5 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,17 @@ +site_name: AWS RDK Documentation +theme: + name: material + palette: + scheme: default + primary: orange +plugins: + - search + # TODO: Enable this if/when docstrings are expanded in the core rdk module. + # - mkdocstrings: + # handlers: + # python: + # paths: [rdk] +markdown_extensions: + - markdown_include.include: + base_path: . +docs_dir: docs diff --git a/poetry.lock b/poetry.lock index 7d94b801..f24e6c0c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -489,6 +489,18 @@ files = [ types-awscrt = "*" typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} +[[package]] +name = "cached-property" +version = "1.5.2" +description = "A decorator for caching properties in classes." +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "cached-property-1.5.2.tar.gz", hash = "sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130"}, + {file = "cached_property-1.5.2-py2.py3-none-any.whl", hash = "sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0"}, +] + [[package]] name = "certifi" version = "2022.12.7" @@ -592,6 +604,24 @@ toml = "*" conda = ["pyyaml"] pipenv = ["pipenv"] +[[package]] +name = "ghp-import" +version = "2.1.0" +description = "Copy your docs directly to the gh-pages branch." +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "ghp-import-2.1.0.tar.gz", hash = "sha256:9c535c4c61193c2df8871222567d7fd7e5014d835f97dc7b7439069e2413d343"}, + {file = "ghp_import-2.1.0-py3-none-any.whl", hash = "sha256:8337dd7b50877f163d4c0289bc1f1c7f127550241988d568c1db512c4324a619"}, +] + +[package.dependencies] +python-dateutil = ">=2.8.1" + +[package.extras] +dev = ["flake8", "markdown", "twine", "wheel"] + [[package]] name = "gitdb" version = "4.0.10" @@ -623,6 +653,22 @@ files = [ gitdb = ">=4.0.1,<5" typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.8\""} +[[package]] +name = "griffe" +version = "0.28.2" +description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "griffe-0.28.2-py3-none-any.whl", hash = "sha256:bde3a3dfa301a4b113c7fac3b2be45e5723bc50cda4c9cfe13f43c447c9aa5d1"}, + {file = "griffe-0.28.2.tar.gz", hash = "sha256:a471498b0b9505c721ea0e652fd77c97df1aeb56c4eb8c93d24bb1140da4216d"}, +] + +[package.dependencies] +cached-property = {version = "*", markers = "python_version < \"3.8\""} +colorama = ">=0.4" + [[package]] name = "idna" version = "3.4" @@ -674,6 +720,24 @@ pipfile-deprecated-finder = ["pipreqs", "requirementslib"] plugins = ["setuptools"] requirements-deprecated-finder = ["pip-api", "pipreqs"] +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + [[package]] name = "jmespath" version = "1.0.1" @@ -686,6 +750,42 @@ files = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] +[[package]] +name = "markdown" +version = "3.3.7" +description = "Python implementation of Markdown." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "Markdown-3.3.7-py3-none-any.whl", hash = "sha256:f5da449a6e1c989a4cea2631aa8ee67caa5a2ef855d551c88f9e309f4634c621"}, + {file = "Markdown-3.3.7.tar.gz", hash = "sha256:cbb516f16218e643d8e0a95b309f77eb118cb138d39a4f27851e6a63581db874"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +testing = ["coverage", "pyyaml"] + +[[package]] +name = "markdown-include" +version = "0.8.1" +description = "A Python-Markdown extension which provides an 'include' function" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "markdown-include-0.8.1.tar.gz", hash = "sha256:1d0623e0fc2757c38d35df53752768356162284259d259c486b4ab6285cdbbe3"}, + {file = "markdown_include-0.8.1-py3-none-any.whl", hash = "sha256:32f0635b9cfef46997b307e2430022852529f7a5b87c0075c504283e7cc7db53"}, +] + +[package.dependencies] +markdown = ">=3.0" + +[package.extras] +tests = ["pytest"] + [[package]] name = "markdown-it-py" version = "2.2.0" @@ -712,6 +812,66 @@ profiling = ["gprof2dot"] rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] +[[package]] +name = "markupsafe" +version = "2.1.2" +description = "Safely add untrusted strings to HTML/XML markup." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, + {file = 
"MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, + {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, +] + [[package]] name = "mdurl" version = "0.1.2" @@ -724,6 +884,141 @@ files = [ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, ] +[[package]] +name = "mergedeep" +version = "1.3.4" +description = "A deep merge function for 🐍." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, + {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, +] + +[[package]] +name = "mkdocs" +version = "1.4.3" +description = "Project documentation with Markdown." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mkdocs-1.4.3-py3-none-any.whl", hash = "sha256:6ee46d309bda331aac915cd24aab882c179a933bd9e77b80ce7d2eaaa3f689dd"}, + {file = "mkdocs-1.4.3.tar.gz", hash = "sha256:5955093bbd4dd2e9403c5afaf57324ad8b04f16886512a3ee6ef828956481c57"}, +] + +[package.dependencies] +click = ">=7.0" +colorama = {version = ">=0.4", markers = "platform_system == \"Windows\""} +ghp-import = ">=1.0" +importlib-metadata = {version = ">=4.3", markers = "python_version < \"3.10\""} +jinja2 = ">=2.11.1" +markdown = ">=3.2.1,<3.4" +mergedeep = ">=1.3.4" +packaging = ">=20.5" +pyyaml = ">=5.1" +pyyaml-env-tag = ">=0.1" +typing-extensions = {version = ">=3.10", markers = "python_version < \"3.8\""} +watchdog = ">=2.0" + +[package.extras] +i18n = ["babel (>=2.9.0)"] +min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"] + +[[package]] +name = "mkdocs-autorefs" +version = "0.4.1" +description = "Automatically link across pages in MkDocs." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mkdocs-autorefs-0.4.1.tar.gz", hash = "sha256:70748a7bd025f9ecd6d6feeba8ba63f8e891a1af55f48e366d6d6e78493aba84"}, + {file = "mkdocs_autorefs-0.4.1-py3-none-any.whl", hash = "sha256:a2248a9501b29dc0cc8ba4c09f4f47ff121945f6ce33d760f145d6f89d313f5b"}, +] + +[package.dependencies] +Markdown = ">=3.3" +mkdocs = ">=1.1" + +[[package]] +name = "mkdocs-material" +version = "9.1.14" +description = "Documentation that simply works" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mkdocs_material-9.1.14-py3-none-any.whl", hash = "sha256:b56a9f955ed32d38333715cbbf68ce38f683bf38610c65094fa4ef2db9f08bcd"}, + {file = "mkdocs_material-9.1.14.tar.gz", hash = "sha256:1ae74cc5464ef2f64574d4884512efed7f4db386fb9bc6af20fd427d7a702f49"}, +] + +[package.dependencies] +colorama = ">=0.4" +jinja2 = ">=3.0" +markdown = ">=3.2" +mkdocs = ">=1.4.2" +mkdocs-material-extensions = ">=1.1" +pygments = ">=2.14" +pymdown-extensions = ">=9.9.1" +regex = ">=2022.4.24" +requests = ">=2.26" + +[[package]] +name = "mkdocs-material-extensions" +version = "1.1.1" +description = "Extension pack for Python Markdown and MkDocs Material." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mkdocs_material_extensions-1.1.1-py3-none-any.whl", hash = "sha256:e41d9f38e4798b6617ad98ca8f7f1157b1e4385ac1459ca1e4ea219b556df945"}, + {file = "mkdocs_material_extensions-1.1.1.tar.gz", hash = "sha256:9c003da71e2cc2493d910237448c672e00cefc800d3d6ae93d2fc69979e3bd93"}, +] + +[[package]] +name = "mkdocstrings" +version = "0.21.2" +description = "Automatic documentation from sources, for MkDocs." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mkdocstrings-0.21.2-py3-none-any.whl", hash = "sha256:949ef8da92df9d692ca07be50616459a6b536083a25520fd54b00e8814ce019b"}, + {file = "mkdocstrings-0.21.2.tar.gz", hash = "sha256:304e56a2e90595708a38a13a278e538a67ad82052dd5c8b71f77a604a4f3d911"}, +] + +[package.dependencies] +Jinja2 = ">=2.11.1" +Markdown = ">=3.3" +MarkupSafe = ">=1.1" +mkdocs = ">=1.2" +mkdocs-autorefs = ">=0.3.1" +pymdown-extensions = ">=6.3" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.10\""} + +[package.extras] +crystal = ["mkdocstrings-crystal (>=0.3.4)"] +python = ["mkdocstrings-python (>=0.5.2)"] +python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] + +[[package]] +name = "mkdocstrings-python" +version = "1.0.0" +description = "A Python handler for mkdocstrings." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mkdocstrings_python-1.0.0-py3-none-any.whl", hash = "sha256:c59d67009a7a85172f4da990d8523e95606b6a1ff93a22a2351ad3b5f8cafed1"}, + {file = "mkdocstrings_python-1.0.0.tar.gz", hash = "sha256:b89d849df990204f909d5452548b6936a185f912da06208a93909bebe25d6e67"}, +] + +[package.dependencies] +griffe = ">=0.24" +mkdocstrings = ">=0.20" + [[package]] name = "mypy" version = "1.3.0" @@ -951,6 +1246,22 @@ files = [ [package.extras] plugins = ["importlib-metadata"] +[[package]] +name = "pymdown-extensions" +version = "10.0.1" +description = "Extension pack for Python Markdown." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pymdown_extensions-10.0.1-py3-none-any.whl", hash = "sha256:ae66d84013c5d027ce055693e09a4628b67e9dec5bce05727e45b0918e36f274"}, + {file = "pymdown_extensions-10.0.1.tar.gz", hash = "sha256:b44e1093a43b8a975eae17b03c3a77aad4681b3b56fce60ce746dbef1944c8cb"}, +] + +[package.dependencies] +markdown = ">=3.2" +pyyaml = "*" + [[package]] name = "pyparsing" version = "3.0.9" @@ -1031,6 +1342,21 @@ files = [ {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] +[[package]] +name = "pyyaml-env-tag" +version = "0.1" +description = "A custom YAML tag for referencing environment variables in YAML files. " +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, + {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, +] + +[package.dependencies] +pyyaml = "*" + [[package]] name = "rdklib" version = "0.3.0" @@ -1048,6 +1374,104 @@ boto3 = "*" botocore = "*" rdk = "*" +[[package]] +name = "regex" +version = "2023.5.5" +description = "Alternative regular expression module, to replace re." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "regex-2023.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:48c9ec56579d4ba1c88f42302194b8ae2350265cb60c64b7b9a88dcb7fbde309"}, + {file = "regex-2023.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f4541550459c08fdd6f97aa4e24c6f1932eec780d58a2faa2068253df7d6ff"}, + {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e22e4460f0245b468ee645156a4f84d0fc35a12d9ba79bd7d79bdcd2f9629d"}, + {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b870b6f632fc74941cadc2a0f3064ed8409e6f8ee226cdfd2a85ae50473aa94"}, + {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:171c52e320fe29260da550d81c6b99f6f8402450dc7777ef5ced2e848f3b6f8f"}, + {file = "regex-2023.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aad5524c2aedaf9aa14ef1bc9327f8abd915699dea457d339bebbe2f0d218f86"}, + {file = "regex-2023.5.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a0f874ee8c0bc820e649c900243c6d1e6dc435b81da1492046716f14f1a2a96"}, + {file = "regex-2023.5.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e645c757183ee0e13f0bbe56508598e2d9cd42b8abc6c0599d53b0d0b8dd1479"}, + {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a4c5da39bca4f7979eefcbb36efea04471cd68db2d38fcbb4ee2c6d440699833"}, + {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5e3f4468b8c6fd2fd33c218bbd0a1559e6a6fcf185af8bb0cc43f3b5bfb7d636"}, + {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:59e4b729eae1a0919f9e4c0fc635fbcc9db59c74ad98d684f4877be3d2607dd6"}, + {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ba73a14e9c8f9ac409863543cde3290dba39098fc261f717dc337ea72d3ebad2"}, + {file = "regex-2023.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0bbd5dcb19603ab8d2781fac60114fb89aee8494f4505ae7ad141a3314abb1f9"}, + {file = 
"regex-2023.5.5-cp310-cp310-win32.whl", hash = "sha256:40005cbd383438aecf715a7b47fe1e3dcbc889a36461ed416bdec07e0ef1db66"}, + {file = "regex-2023.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:59597cd6315d3439ed4b074febe84a439c33928dd34396941b4d377692eca810"}, + {file = "regex-2023.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8f08276466fedb9e36e5193a96cb944928301152879ec20c2d723d1031cd4ddd"}, + {file = "regex-2023.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cd46f30e758629c3ee91713529cfbe107ac50d27110fdcc326a42ce2acf4dafc"}, + {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2910502f718828cecc8beff004917dcf577fc5f8f5dd40ffb1ea7612124547b"}, + {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:445d6f4fc3bd9fc2bf0416164454f90acab8858cd5a041403d7a11e3356980e8"}, + {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18196c16a584619c7c1d843497c069955d7629ad4a3fdee240eb347f4a2c9dbe"}, + {file = "regex-2023.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33d430a23b661629661f1fe8395be2004006bc792bb9fc7c53911d661b69dd7e"}, + {file = "regex-2023.5.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72a28979cc667e5f82ef433db009184e7ac277844eea0f7f4d254b789517941d"}, + {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f764e4dfafa288e2eba21231f455d209f4709436baeebb05bdecfb5d8ddc3d35"}, + {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23d86ad2121b3c4fc78c58f95e19173790e22ac05996df69b84e12da5816cb17"}, + {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:690a17db524ee6ac4a27efc5406530dd90e7a7a69d8360235323d0e5dafb8f5b"}, + {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:1ecf3dcff71f0c0fe3e555201cbe749fa66aae8d18f80d2cc4de8e66df37390a"}, + {file = "regex-2023.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:811040d7f3dd9c55eb0d8b00b5dcb7fd9ae1761c454f444fd9f37fe5ec57143a"}, + {file = "regex-2023.5.5-cp311-cp311-win32.whl", hash = "sha256:c8c143a65ce3ca42e54d8e6fcaf465b6b672ed1c6c90022794a802fb93105d22"}, + {file = "regex-2023.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:586a011f77f8a2da4b888774174cd266e69e917a67ba072c7fc0e91878178a80"}, + {file = "regex-2023.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b6365703e8cf1644b82104cdd05270d1a9f043119a168d66c55684b1b557d008"}, + {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a56c18f21ac98209da9c54ae3ebb3b6f6e772038681d6cb43b8d53da3b09ee81"}, + {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8b942d8b3ce765dbc3b1dad0a944712a89b5de290ce8f72681e22b3c55f3cc8"}, + {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:844671c9c1150fcdac46d43198364034b961bd520f2c4fdaabfc7c7d7138a2dd"}, + {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2ce65bdeaf0a386bb3b533a28de3994e8e13b464ac15e1e67e4603dd88787fa"}, + {file = "regex-2023.5.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fee0016cc35a8a91e8cc9312ab26a6fe638d484131a7afa79e1ce6165328a135"}, + {file = 
"regex-2023.5.5-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:18f05d14f14a812fe9723f13afafefe6b74ca042d99f8884e62dbd34dcccf3e2"}, + {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:941b3f1b2392f0bcd6abf1bc7a322787d6db4e7457be6d1ffd3a693426a755f2"}, + {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:921473a93bcea4d00295799ab929522fc650e85c6b9f27ae1e6bb32a790ea7d3"}, + {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:e2205a81f815b5bb17e46e74cc946c575b484e5f0acfcb805fb252d67e22938d"}, + {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:385992d5ecf1a93cb85adff2f73e0402dd9ac29b71b7006d342cc920816e6f32"}, + {file = "regex-2023.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:890a09cb0a62198bff92eda98b2b507305dd3abf974778bae3287f98b48907d3"}, + {file = "regex-2023.5.5-cp36-cp36m-win32.whl", hash = "sha256:821a88b878b6589c5068f4cc2cfeb2c64e343a196bc9d7ac68ea8c2a776acd46"}, + {file = "regex-2023.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:7918a1b83dd70dc04ab5ed24c78ae833ae8ea228cef84e08597c408286edc926"}, + {file = "regex-2023.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:338994d3d4ca4cf12f09822e025731a5bdd3a37aaa571fa52659e85ca793fb67"}, + {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a69cf0c00c4d4a929c6c7717fd918414cab0d6132a49a6d8fc3ded1988ed2ea"}, + {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f5e06df94fff8c4c85f98c6487f6636848e1dc85ce17ab7d1931df4a081f657"}, + {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8906669b03c63266b6a7693d1f487b02647beb12adea20f8840c1a087e2dfb5"}, + {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fda3e50abad8d0f48df621cf75adc73c63f7243cbe0e3b2171392b445401550"}, + {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ac2b7d341dc1bd102be849d6dd33b09701223a851105b2754339e390be0627a"}, + {file = "regex-2023.5.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fb2b495dd94b02de8215625948132cc2ea360ae84fe6634cd19b6567709c8ae2"}, + {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aa7d032c1d84726aa9edeb6accf079b4caa87151ca9fabacef31fa028186c66d"}, + {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3d45864693351c15531f7e76f545ec35000d50848daa833cead96edae1665559"}, + {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21e90a288e6ba4bf44c25c6a946cb9b0f00b73044d74308b5e0afd190338297c"}, + {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:10250a093741ec7bf74bcd2039e697f519b028518f605ff2aa7ac1e9c9f97423"}, + {file = "regex-2023.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6b8d0c153f07a953636b9cdb3011b733cadd4178123ef728ccc4d5969e67f3c2"}, + {file = "regex-2023.5.5-cp37-cp37m-win32.whl", hash = "sha256:10374c84ee58c44575b667310d5bbfa89fb2e64e52349720a0182c0017512f6c"}, + {file = "regex-2023.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9b320677521aabf666cdd6e99baee4fb5ac3996349c3b7f8e7c4eee1c00dfe3a"}, + {file = "regex-2023.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:afb1c70ec1e594a547f38ad6bf5e3d60304ce7539e677c1429eebab115bce56e"}, + {file = "regex-2023.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf123225945aa58b3057d0fba67e8061c62d14cc8a4202630f8057df70189051"}, + {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99757ad7fe5c8a2bb44829fc57ced11253e10f462233c1255fe03888e06bc19"}, + {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a623564d810e7a953ff1357f7799c14bc9beeab699aacc8b7ab7822da1e952b8"}, + {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ced02e3bd55e16e89c08bbc8128cff0884d96e7f7a5633d3dc366b6d95fcd1d6"}, + {file = "regex-2023.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1cbe6b5be3b9b698d8cc4ee4dee7e017ad655e83361cd0ea8e653d65e469468"}, + {file = "regex-2023.5.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a6e4b0e0531223f53bad07ddf733af490ba2b8367f62342b92b39b29f72735a"}, + {file = "regex-2023.5.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e9c4f778514a560a9c9aa8e5538bee759b55f6c1dcd35613ad72523fd9175b8"}, + {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:256f7f4c6ba145f62f7a441a003c94b8b1af78cee2cccacfc1e835f93bc09426"}, + {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:bd7b68fd2e79d59d86dcbc1ccd6e2ca09c505343445daaa4e07f43c8a9cc34da"}, + {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4a5059bd585e9e9504ef9c07e4bc15b0a621ba20504388875d66b8b30a5c4d18"}, + {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:6893544e06bae009916a5658ce7207e26ed17385149f35a3125f5259951f1bbe"}, + {file = "regex-2023.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c64d5abe91a3dfe5ff250c6bb267ef00dbc01501518225b45a5f9def458f31fb"}, + {file = "regex-2023.5.5-cp38-cp38-win32.whl", hash = "sha256:7923470d6056a9590247ff729c05e8e0f06bbd4efa6569c916943cb2d9b68b91"}, + {file = "regex-2023.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:4035d6945cb961c90c3e1c1ca2feb526175bcfed44dfb1cc77db4fdced060d3e"}, + {file = "regex-2023.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:50fd2d9b36938d4dcecbd684777dd12a407add4f9f934f235c66372e630772b0"}, + {file = "regex-2023.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d19e57f888b00cd04fc38f5e18d0efbd91ccba2d45039453ab2236e6eec48d4d"}, + {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd966475e963122ee0a7118ec9024388c602d12ac72860f6eea119a3928be053"}, + {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db09e6c18977a33fea26fe67b7a842f706c67cf8bda1450974d0ae0dd63570df"}, + {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6164d4e2a82f9ebd7752a06bd6c504791bedc6418c0196cd0a23afb7f3e12b2d"}, + {file = "regex-2023.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84397d3f750d153ebd7f958efaa92b45fea170200e2df5e0e1fd4d85b7e3f58a"}, + {file = "regex-2023.5.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c3efee9bb53cbe7b285760c81f28ac80dc15fa48b5fe7e58b52752e642553f1"}, + {file = 
"regex-2023.5.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:144b5b017646b5a9392a5554a1e5db0000ae637be4971c9747566775fc96e1b2"}, + {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1189fbbb21e2c117fda5303653b61905aeeeea23de4a94d400b0487eb16d2d60"}, + {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f83fe9e10f9d0b6cf580564d4d23845b9d692e4c91bd8be57733958e4c602956"}, + {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:72aa4746993a28c841e05889f3f1b1e5d14df8d3daa157d6001a34c98102b393"}, + {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:de2f780c3242ea114dd01f84848655356af4dd561501896c751d7b885ea6d3a1"}, + {file = "regex-2023.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:290fd35219486dfbc00b0de72f455ecdd63e59b528991a6aec9fdfc0ce85672e"}, + {file = "regex-2023.5.5-cp39-cp39-win32.whl", hash = "sha256:732176f5427e72fa2325b05c58ad0b45af341c459910d766f814b0584ac1f9ac"}, + {file = "regex-2023.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:1307aa4daa1cbb23823d8238e1f61292fd07e4e5d8d38a6efff00b67a7cdb764"}, + {file = "regex-2023.5.5.tar.gz", hash = "sha256:7d76a8a1fc9da08296462a18f16620ba73bcbf5909e42383b253ef34d9d5141e"}, +] + [[package]] name = "requests" version = "2.31.0" @@ -1417,6 +1841,46 @@ brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +[[package]] +name = "watchdog" +version = "3.0.0" +description = "Filesystem events monitoring" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:336adfc6f5cc4e037d52db31194f7581ff744b67382eb6021c868322e32eef41"}, + {file = "watchdog-3.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a70a8dcde91be523c35b2bf96196edc5730edb347e374c7de7cd20c43ed95397"}, + {file = "watchdog-3.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:adfdeab2da79ea2f76f87eb42a3ab1966a5313e5a69a0213a3cc06ef692b0e96"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2b57a1e730af3156d13b7fdddfc23dea6487fceca29fc75c5a868beed29177ae"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ade88d0d778b1b222adebcc0927428f883db07017618a5e684fd03b83342bd9"}, + {file = "watchdog-3.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7e447d172af52ad204d19982739aa2346245cc5ba6f579d16dac4bfec226d2e7"}, + {file = "watchdog-3.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9fac43a7466eb73e64a9940ac9ed6369baa39b3bf221ae23493a9ec4d0022674"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8ae9cda41fa114e28faf86cb137d751a17ffd0316d1c34ccf2235e8a84365c7f"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:25f70b4aa53bd743729c7475d7ec41093a580528b100e9a8c5b5efe8899592fc"}, + {file = "watchdog-3.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4f94069eb16657d2c6faada4624c39464f65c05606af50bb7902e036e3219be3"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7c5f84b5194c24dd573fa6472685b2a27cc5a17fe5f7b6fd40345378ca6812e3"}, + {file = "watchdog-3.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa7f6a12e831ddfe78cdd4f8996af9cf334fd6346531b16cec61c3b3c0d8da0"}, 
+ {file = "watchdog-3.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:233b5817932685d39a7896b1090353fc8efc1ef99c9c054e46c8002561252fb8"}, + {file = "watchdog-3.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:13bbbb462ee42ec3c5723e1205be8ced776f05b100e4737518c67c8325cf6100"}, + {file = "watchdog-3.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8f3ceecd20d71067c7fd4c9e832d4e22584318983cabc013dbf3f70ea95de346"}, + {file = "watchdog-3.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9d8c8ec7efb887333cf71e328e39cffbf771d8f8f95d308ea4125bf5f90ba64"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:0e06ab8858a76e1219e68c7573dfeba9dd1c0219476c5a44d5333b01d7e1743a"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:d00e6be486affb5781468457b21a6cbe848c33ef43f9ea4a73b4882e5f188a44"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:c07253088265c363d1ddf4b3cdb808d59a0468ecd017770ed716991620b8f77a"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:5113334cf8cf0ac8cd45e1f8309a603291b614191c9add34d33075727a967709"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:51f90f73b4697bac9c9a78394c3acbbd331ccd3655c11be1a15ae6fe289a8c83"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:ba07e92756c97e3aca0912b5cbc4e5ad802f4557212788e72a72a47ff376950d"}, + {file = "watchdog-3.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:d429c2430c93b7903914e4db9a966c7f2b068dd2ebdd2fa9b9ce094c7d459f33"}, + {file = "watchdog-3.0.0-py3-none-win32.whl", hash = "sha256:3ed7c71a9dccfe838c2f0b6314ed0d9b22e77d268c67e015450a29036a81f60f"}, + {file = "watchdog-3.0.0-py3-none-win_amd64.whl", hash = "sha256:4c9956d27be0bb08fc5f30d9d0179a855436e655f046d288e2bcc11adfae893c"}, + {file = "watchdog-3.0.0-py3-none-win_ia64.whl", hash = "sha256:5d9f3a10e02d7371cd929b5d8f11e87d4bad890212ed3901f9b4d68767bee759"}, + {file = "watchdog-3.0.0.tar.gz", hash = "sha256:4d98a320595da7a7c5a18fc48cb633c2e73cda78f93cac2ef42d42bf609a33f9"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + [[package]] name = "zipp" version = "3.11.0" @@ -1433,10 +1897,7 @@ files = [ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] -[extras] -rdklib = [] - [metadata] lock-version = "2.0" python-versions = "^3.7.2" -content-hash = "39d23def7d67baeaa4bb1ba8172595ec2c298953c9cd22a98c826d32638f924c" +content-hash = "ad2a2d0ac50f61faa70f6b3dea42914278a5524cfaf822aa6aa265881915135e" diff --git a/pyproject.toml b/pyproject.toml index bc07375a..c5f1f5ae 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -121,6 +121,13 @@ safety = "^2.3.5" types-pyyaml = "^6.0.12.10" boto3-stubs = {extras = ["cloudformation", "config", "iam", "s3", "sts"], version = "^1.26.139"} + +[tool.poetry.group.docs.dependencies] +mkdocs = "^1.4.3" +mkdocs-material = "^9.1.14" +mkdocstrings-python = "^1.0.0" +markdown-include = "^0.8.1" + [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api"