diff --git a/.gitignore b/.gitignore
index c9f841b..f7d387a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,3 +12,6 @@ backend.vars
# caches
lambdas/.zip-cache/*
+
+# Junk files
+.DS_Store
diff --git a/README.md b/README.md
index 3bd2e38..c5fddb3 100644
--- a/README.md
+++ b/README.md
@@ -41,6 +41,7 @@ for dxw's Dalmatian hosting platform.
| [aws_cloudtrail.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudtrail) | resource |
| [aws_cloudwatch_log_group.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_log_group) | resource |
| [aws_cloudwatch_log_group.cloudwatch_slack_alerts_lambda_log_group](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_log_group) | resource |
+| [aws_cloudwatch_log_group.datadog_forwarder_log_group](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_log_group) | resource |
| [aws_cloudwatch_log_group.delete_default_resources_lambda_log_group](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/cloudwatch_log_group) | resource |
| [aws_codestarconnections_connection.connections](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/codestarconnections_connection) | resource |
| [aws_glue_catalog_database.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/glue_catalog_database) | resource |
@@ -50,6 +51,13 @@ for dxw's Dalmatian hosting platform.
| [aws_iam_policy.custom](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
| [aws_iam_policy.datadog_aws_integration](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
| [aws_iam_policy.datadog_aws_integration_resource_collection](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
+| [aws_iam_policy.datadog_forwarder](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
+| [aws_iam_policy.datadog_forwarder_kms_encrypt](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
+| [aws_iam_policy.datadog_forwarder_kms_encrypt_wildcard](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
+| [aws_iam_policy.datadog_forwarder_s3_object_read](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
+| [aws_iam_policy.datadog_forwarder_s3_object_rw](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
+| [aws_iam_policy.datadog_forwarder_secret](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
+| [aws_iam_policy.datadog_forwarder_tags](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
| [aws_iam_policy.delete_default_resources_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
| [aws_iam_policy.delete_default_resources_vpc_delete_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
| [aws_iam_policy.ssm_dhmc](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_policy) | resource |
@@ -57,6 +65,7 @@ for dxw's Dalmatian hosting platform.
| [aws_iam_role.cloudwatch_slack_alerts_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource |
| [aws_iam_role.custom](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource |
| [aws_iam_role.datadog_aws_integration](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource |
+| [aws_iam_role.datadog_forwarder](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource |
| [aws_iam_role.delete_default_resources_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource |
| [aws_iam_role.ssm_dhmc](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role) | resource |
| [aws_iam_role_policy_attachment.cloudtrail_cloudwatch_logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
@@ -65,6 +74,13 @@ for dxw's Dalmatian hosting platform.
| [aws_iam_role_policy_attachment.datadog_aws_integration](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
| [aws_iam_role_policy_attachment.datadog_aws_integration_resource_collection](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
| [aws_iam_role_policy_attachment.datadog_aws_integration_security_audit](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
+| [aws_iam_role_policy_attachment.datadog_forwarder](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
+| [aws_iam_role_policy_attachment.datadog_forwarder_kms_encrypt](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
+| [aws_iam_role_policy_attachment.datadog_forwarder_kms_encrypt_wildcard](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
+| [aws_iam_role_policy_attachment.datadog_forwarder_s3_object_read](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
+| [aws_iam_role_policy_attachment.datadog_forwarder_s3_object_rw](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
+| [aws_iam_role_policy_attachment.datadog_forwarder_secret](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
+| [aws_iam_role_policy_attachment.datadog_forwarder_tags](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
| [aws_iam_role_policy_attachment.delete_default_resources_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
| [aws_iam_role_policy_attachment.delete_default_resources_vpc_delete_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
| [aws_iam_role_policy_attachment.ssm_dhmc](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/iam_role_policy_attachment) | resource |
@@ -73,37 +89,51 @@ for dxw's Dalmatian hosting platform.
| [aws_kms_alias.cloudwatch_opsgenie_alerts_sns](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_alias) | resource |
| [aws_kms_alias.cloudwatch_opsgenie_alerts_sns_us_east_1](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_alias) | resource |
| [aws_kms_alias.cloudwatch_slack_alerts](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_alias) | resource |
+| [aws_kms_alias.datadog_forwarder](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_alias) | resource |
+| [aws_kms_alias.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_alias) | resource |
| [aws_kms_alias.delete_default_resources_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_alias) | resource |
| [aws_kms_key.athena_cloudtrail_output](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource |
| [aws_kms_key.cloudtrail_cloudwatch_logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource |
| [aws_kms_key.cloudwatch_opsgenie_alerts_sns](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource |
| [aws_kms_key.cloudwatch_opsgenie_alerts_sns_us_east_1](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource |
| [aws_kms_key.cloudwatch_slack_alerts](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource |
+| [aws_kms_key.datadog_forwarder](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource |
+| [aws_kms_key.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource |
| [aws_kms_key.delete_default_resources_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/kms_key) | resource |
| [aws_lambda_function.cloudwatch_slack_alerts](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_function) | resource |
+| [aws_lambda_function.datadog_service_log_forwarder](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_function) | resource |
| [aws_lambda_function.delete_default_resources](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_function) | resource |
| [aws_lambda_permission.cloudwatch_slack_alerts_sns](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/lambda_permission) | resource |
| [aws_route53_zone.root](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/route53_zone) | resource |
| [aws_s3_bucket.athena_cloudtrail_output](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource |
| [aws_s3_bucket.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource |
+| [aws_s3_bucket.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource |
| [aws_s3_bucket.logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket) | resource |
| [aws_s3_bucket_lifecycle_configuration.athena_cloudtrail_output](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_lifecycle_configuration) | resource |
| [aws_s3_bucket_lifecycle_configuration.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_lifecycle_configuration) | resource |
+| [aws_s3_bucket_lifecycle_configuration.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_lifecycle_configuration) | resource |
| [aws_s3_bucket_lifecycle_configuration.logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_lifecycle_configuration) | resource |
| [aws_s3_bucket_logging.athena_cloudtrail_output](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_logging) | resource |
| [aws_s3_bucket_logging.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_logging) | resource |
+| [aws_s3_bucket_logging.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_logging) | resource |
| [aws_s3_bucket_policy.athena_cloudtrail_output](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_policy) | resource |
| [aws_s3_bucket_policy.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_policy) | resource |
+| [aws_s3_bucket_policy.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_policy) | resource |
| [aws_s3_bucket_policy.logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_policy) | resource |
| [aws_s3_bucket_public_access_block.athena_cloudtrail_output](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_public_access_block) | resource |
| [aws_s3_bucket_public_access_block.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_public_access_block) | resource |
+| [aws_s3_bucket_public_access_block.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_public_access_block) | resource |
| [aws_s3_bucket_public_access_block.logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_public_access_block) | resource |
| [aws_s3_bucket_server_side_encryption_configuration.athena_cloudtrail_output](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_server_side_encryption_configuration) | resource |
| [aws_s3_bucket_server_side_encryption_configuration.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_server_side_encryption_configuration) | resource |
+| [aws_s3_bucket_server_side_encryption_configuration.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_server_side_encryption_configuration) | resource |
| [aws_s3_bucket_server_side_encryption_configuration.logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_server_side_encryption_configuration) | resource |
| [aws_s3_bucket_versioning.athena_cloudtrail_output](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_versioning) | resource |
| [aws_s3_bucket_versioning.cloudtrail](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_versioning) | resource |
+| [aws_s3_bucket_versioning.datadog_lambda](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_versioning) | resource |
| [aws_s3_bucket_versioning.logs](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/s3_bucket_versioning) | resource |
+| [aws_secretsmanager_secret.datadog_api_key](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/secretsmanager_secret) | resource |
+| [aws_secretsmanager_secret_version.datadog_api_key](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/secretsmanager_secret_version) | resource |
| [aws_sns_topic.cloudwatch_opsgenie_alerts](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/sns_topic) | resource |
| [aws_sns_topic.cloudwatch_opsgenie_alerts_us_east_1](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/sns_topic) | resource |
| [aws_sns_topic.cloudwatch_slack_alerts](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/sns_topic) | resource |
@@ -115,7 +145,10 @@ for dxw's Dalmatian hosting platform.
| [aws_sns_topic_subscription.cloudwatch_slack_alerts_lambda_subscription](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/sns_topic_subscription) | resource |
| [aws_ssm_service_setting.ssm_dhmc](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/resources/ssm_service_setting) | resource |
| [datadog_integration_aws.aws](https://registry.terraform.io/providers/DataDog/datadog/latest/docs/resources/integration_aws) | resource |
+| [datadog_integration_aws_lambda_arn.datadog_forwarder_arn](https://registry.terraform.io/providers/DataDog/datadog/latest/docs/resources/integration_aws_lambda_arn) | resource |
+| [datadog_integration_aws_log_collection.datadog_forwarder](https://registry.terraform.io/providers/DataDog/datadog/latest/docs/resources/integration_aws_log_collection) | resource |
| [archive_file.cloudwatch_slack_alerts_lambda](https://registry.terraform.io/providers/hashicorp/archive/latest/docs/data-sources/file) | data source |
+| [archive_file.datadog_forwarder](https://registry.terraform.io/providers/hashicorp/archive/latest/docs/data-sources/file) | data source |
| [archive_file.delete_default_resources_lambda](https://registry.terraform.io/providers/hashicorp/archive/latest/docs/data-sources/file) | data source |
| [aws_caller_identity.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/caller_identity) | data source |
| [aws_regions.current](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/regions) | data source |
@@ -142,6 +175,9 @@ for dxw's Dalmatian hosting platform.
| [custom\_iam\_roles](#input\_custom\_iam\_roles) | Configure custom IAM roles/policies | <pre>map(object({<br>    description = string<br>    policies = map(object({<br>      description = string<br>      Version = string<br>      Statement = list(object({<br>        Action = list(string)<br>        Effect = string<br>        Resource = string<br>      }))<br>    }))<br>    assume_role_policy = object({<br>      Version = string<br>      Statement = list(object({<br>        Action = list(string)<br>        Effect = string<br>        Principal = map(string)<br>      }))<br>    })<br>  }))</pre> | n/a | yes |
| [datadog\_api\_key](#input\_datadog\_api\_key) | Datadog API key | `string` | n/a | yes |
| [datadog\_app\_key](#input\_datadog\_app\_key) | Datadog App key | `string` | n/a | yes |
+| [datadog\_forwarder\_enhanced\_metrics](#input\_datadog\_forwarder\_enhanced\_metrics) | Set the environment variable DD\_ENHANCED\_METRICS on the Forwarder. Set to false to stop the Forwarder from generating enhanced metrics itself, but it will still forward custom metrics from other lambdas. | `bool` | n/a | yes |
+| [datadog\_forwarder\_log\_retention](#input\_datadog\_forwarder\_log\_retention) | Datadog Forwarder S3 bucket retention in days. Set to 0 to keep all logs. | `number` | n/a | yes |
+| [datadog\_forwarder\_store\_failed\_events](#input\_datadog\_forwarder\_store\_failed\_events) | Set environment variable DD\_STORE\_FAILED\_EVENTS on the Forwarder. Set to true to enable the forwarder to also store event data in the S3 bucket | `bool` | n/a | yes |
| [datadog\_region](#input\_datadog\_region) | Datadog region | `string` | n/a | yes |
| [delete\_default\_resources\_lambda\_kms\_encryption](#input\_delete\_default\_resources\_lambda\_kms\_encryption) | Conditionally encrypt the Delete Default Resources Lambda logs with KMS | `bool` | n/a | yes |
| [delete\_default\_resources\_log\_retention](#input\_delete\_default\_resources\_log\_retention) | Log retention for the Delete Default Resources Lambda | `number` | n/a | yes |
@@ -149,6 +185,7 @@ for dxw's Dalmatian hosting platform.
| [enable\_cloudwatch\_opsgenie\_alerts](#input\_enable\_cloudwatch\_opsgenie\_alerts) | Enable CloudWatch Opsgenie alerts. This creates an SNS topic to which alerts and pipelines can send messages, which are then sent to the Opsgenie SNS endpoint. | `bool` | n/a | yes |
| [enable\_cloudwatch\_slack\_alerts](#input\_enable\_cloudwatch\_slack\_alerts) | Enable CloudWatch Slack alerts. This creates an SNS topic to which alerts and pipelines can send messages, which are then picked up by a Lambda function that forwards them to a Slack webhook. | `bool` | n/a | yes |
| [enable\_datadog\_aws\_integration](#input\_enable\_datadog\_aws\_integration) | Conditionally create the datadog AWS integration role (https://docs.datadoghq.com/integrations/guide/aws-terraform-setup/) and configure the datadog integration | `bool` | n/a | yes |
+| [enable\_datadog\_forwarder](#input\_enable\_datadog\_forwarder) | Conditionally launch Datadog AWS service log forwarder lambda | `bool` | n/a | yes |
| [enable\_delete\_default\_resources](#input\_enable\_delete\_default\_resources) | Creates a Lambda function which deletes all default VPCs and resources within them. This only needs to be run once, either through the AWS console or via the AWS CLI | `bool` | n/a | yes |
| [enable\_route53\_root\_hosted\_zone](#input\_enable\_route53\_root\_hosted\_zone) | Conditionally create Route53 hosted zone, which will contain the DNS records for resources launched within the account. | `bool` | n/a | yes |
| [enable\_s3\_tfvars](#input\_enable\_s3\_tfvars) | enable\_s3\_tfvars | `bool` | n/a | yes |
diff --git a/datadog-forwarder-lambda.tf b/datadog-forwarder-lambda.tf
new file mode 100644
index 0000000..c9c6577
--- /dev/null
+++ b/datadog-forwarder-lambda.tf
@@ -0,0 +1,259 @@
+resource "aws_kms_key" "datadog_forwarder" {
+ count = local.enable_datadog_forwarder ? 1 : 0
+
+ description = "This key is used to encrypt the DataDog Forwarder Lambda logs (${local.project_name})"
+ deletion_window_in_days = 10
+ enable_key_rotation = true
+ policy = templatefile(
+ "${path.root}/policies/kms-key-policy.json.tpl",
+ {
+      statement = <<EOT
diff --git a/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/METADATA
new file mode 100644
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/METADATA
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+Description-Content-Type: text/x-rst
+License-File: LICENSE.rst
+Requires-Dist: wrapt (<2,>=1.10)
+Provides-Extra: dev
+Requires-Dist: tox ; extra == 'dev'
+Requires-Dist: PyTest ; extra == 'dev'
+Requires-Dist: PyTest-Cov ; extra == 'dev'
+Requires-Dist: bump2version (<1) ; extra == 'dev'
+Requires-Dist: sphinx (<2) ; extra == 'dev'
+
+
+Deprecated Library
+------------------
+
+Deprecated is Easy to Use
+`````````````````````````
+
+If you need to mark a function or a method as deprecated,
+you can use the ``@deprecated`` decorator:
+
+Save in a hello.py:
+
+.. code:: python
+
+ from deprecated import deprecated
+
+
+ @deprecated(version='1.2.1', reason="You should use another function")
+ def some_old_function(x, y):
+ return x + y
+
+
+ class SomeClass(object):
+ @deprecated(version='1.3.0', reason="This method is deprecated")
+ def some_old_method(self, x, y):
+ return x + y
+
+
+ some_old_function(12, 34)
+ obj = SomeClass()
+ obj.some_old_method(5, 8)
+
+
+And Easy to Setup
+`````````````````
+
+And run it:
+
+.. code:: bash
+
+    $ pip install Deprecated
+    $ python hello.py
+    hello.py:15: DeprecationWarning: Call to deprecated function (or staticmethod) some_old_function.
+    (You should use another function) -- Deprecated since version 1.2.1.
+    some_old_function(12, 34)
+    hello.py:17: DeprecationWarning: Call to deprecated method some_old_method.
+    (This method is deprecated) -- Deprecated since version 1.3.0.
+    obj.some_old_method(5, 8)
+
+
+You can document your code
+``````````````````````````
+
+Have you ever wondered how to document that some functions, classes, methods, etc. are deprecated?
+This is now possible with the integrated Sphinx directives:
+
+For instance, in hello_sphinx.py:
+
+.. code:: python
+
+ from deprecated.sphinx import deprecated
+ from deprecated.sphinx import versionadded
+ from deprecated.sphinx import versionchanged
+
+
+ @versionadded(version='1.0', reason="This function is new")
+ def function_one():
+ '''This is the function one'''
+
+
+ @versionchanged(version='1.0', reason="This function is modified")
+ def function_two():
+ '''This is the function two'''
+
+
+ @deprecated(version='1.0', reason="This function will be removed soon")
+ def function_three():
+ '''This is the function three'''
+
+
+ function_one()
+ function_two()
+ function_three() # warns
+
+ help(function_one)
+ help(function_two)
+ help(function_three)
+
+
+The result is immediate
+```````````````````````
+
+Run it:
+
+.. code:: bash
+
+ $ python hello_sphinx.py
+
+ hello_sphinx.py:23: DeprecationWarning: Call to deprecated function (or staticmethod) function_three.
+ (This function will be removed soon) -- Deprecated since version 1.0.
+ function_three() # warns
+
+ Help on function function_one in module __main__:
+
+ function_one()
+ This is the function one
+
+ .. versionadded:: 1.0
+ This function is new
+
+ Help on function function_two in module __main__:
+
+ function_two()
+ This is the function two
+
+ .. versionchanged:: 1.0
+ This function is modified
+
+ Help on function function_three in module __main__:
+
+ function_three()
+ This is the function three
+
+ .. deprecated:: 1.0
+ This function will be removed soon
+
+
+Links
+`````
+
+* `Python package index (PyPi) `_
+* `GitHub website `_
+* `Read The Docs `_
+* `EBook on Lulu.com `_
+* `StackOverFlow Q&A `_
+* `Development version
+ `_
+
diff --git a/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/RECORD
new file mode 100644
index 0000000..d8f67cb
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/RECORD
@@ -0,0 +1,13 @@
+Deprecated-1.2.14.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+Deprecated-1.2.14.dist-info/LICENSE.rst,sha256=HoPt0VvkGbXVveNy4yXlJ_9PmRX1SOfHUxS0H2aZ6Dw,1081
+Deprecated-1.2.14.dist-info/METADATA,sha256=xQYvk5nwOfnkxxRD-VHkpE-sMu0IBHRZ8ayspypfkTs,5354
+Deprecated-1.2.14.dist-info/RECORD,,
+Deprecated-1.2.14.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+Deprecated-1.2.14.dist-info/WHEEL,sha256=a-zpFRIJzOq5QfuhBzbhiA1eHTzNCJn8OdRvhdNX0Rk,110
+Deprecated-1.2.14.dist-info/top_level.txt,sha256=nHbOYawKPQQE5lQl-toUB1JBRJjUyn_m_Mb8RVJ0RjA,11
+deprecated/__init__.py,sha256=ZphiULqDVrESSB0mLV2WA88JyhQxZSK44zuDGbV5k-g,349
+deprecated/__pycache__/__init__.cpython-311.pyc,,
+deprecated/__pycache__/classic.cpython-311.pyc,,
+deprecated/__pycache__/sphinx.cpython-311.pyc,,
+deprecated/classic.py,sha256=QugmUi7IhBvp2nDvMtyWqFDPRR43-9nfSZG1ZJSDpFM,9880
+deprecated/sphinx.py,sha256=NqQ0oKGcVn6yUe23iGbCieCgvWbEDQSPt9QelbXJnDU,10258
diff --git a/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/WHEEL
new file mode 100644
index 0000000..f771c29
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.40.0)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/top_level.txt b/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/top_level.txt
new file mode 100644
index 0000000..9f8d550
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/Deprecated-1.2.14.dist-info/top_level.txt
@@ -0,0 +1 @@
+deprecated
diff --git a/lambdas/aws-dd-forwarder-3.127.0/META_INF/aws_signer_signature_v1.0.SF b/lambdas/aws-dd-forwarder-3.127.0/META_INF/aws_signer_signature_v1.0.SF
new file mode 100644
index 0000000..b55229a
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/META_INF/aws_signer_signature_v1.0.SF
@@ -0,0 +1,70 @@
+-----BEGIN PKCS7-----
+MIAGCSqGSIb3DQEHAqCAMIACAQExDTALBglghkgBZQMEAgIwCwYJKoZIhvcNAQcB
+oIAwggJ7MIICAqADAgECAhEAgseesYiwj2v65uNdIxGWjDAKBggqhkjOPQQDAzBp
+MQswCQYDVQQGEwJVUzEMMAoGA1UECgwDQVdTMRUwEwYDVQQLDAxDcnlwdG9ncmFw
+aHkxCzAJBgNVBAgMAldBMSgwJgYDVQQDDB9TaWduZXIgdXMtZWFzdC0xIFNVQk9S
+RElOQVRFIENBMB4XDTI0MTAxMTAzNTUwNloXDTI0MTAxNDA0NTUwNVowYjELMAkG
+A1UEBhMCVVMxCzAJBgNVBAgMAldBMRAwDgYDVQQHDAdTZWF0dGxlMQwwCgYDVQQK
+DANBV1MxFTATBgNVBAsMDENyeXB0b2dyYXBoeTEPMA0GA1UEAwwGc2lnbmVyMHYw
+EAYHKoZIzj0CAQYFK4EEACIDYgAENEJBApwU+6Uew83wGVUmaoQVqhH8OWJ8uhNK
+pYeGe2aO+ltRW56QVdcbTV5bndUcW9nslQ946H5Zn1Un/j/WPxkBJjUfNZvKvwDL
+kD2kbr6fpJnpTrylWuJC/JyRXP3Uo3UwczAJBgNVHRMEAjAAMB8GA1UdIwQYMBaA
+FBW0Hd/bDRgmoXefjl93qtSKwJXVMB0GA1UdDgQWBBRcF773L+DbVZJg2nkGpbHH
+SGoCJDAOBgNVHQ8BAf8EBAMCB4AwFgYDVR0lAQH/BAwwCgYIKwYBBQUHAwMwCgYI
+KoZIzj0EAwMDZwAwZAIwEVaZHYn31KWcjTkpUDxyYtN2OZdFmDx05ZwyOjz74MbS
+ZI1gWjpUz8fG5ZeZ2rzfAjAiJzHe0f6PnC29XXlT+XbaLCgX+93ma2BO0ssYugs6
+FMXwBljceZQwC+X4Dpp58agwggJzMIIB+qADAgECAhEAzX5q6bZNJ1qnMgY/JSNY
+BDAKBggqhkjOPQQDAzBpMQswCQYDVQQGEwJVUzEMMAoGA1UECgwDQVdTMRUwEwYD
+VQQLDAxDcnlwdG9ncmFwaHkxCzAJBgNVBAgMAldBMSgwJgYDVQQDDB9TaWduZXIg
+dXMtd2VzdC0yIFNVQk9SRElOQVRFIENBMB4XDTI0MDgwMTE0MjIxNloXDTI1MDUw
+MTE1MjIxNlowaTELMAkGA1UEBhMCVVMxDDAKBgNVBAoMA0FXUzEVMBMGA1UECwwM
+Q3J5cHRvZ3JhcGh5MQswCQYDVQQIDAJXQTEoMCYGA1UEAwwfU2lnbmVyIHVzLWVh
+c3QtMSBTVUJPUkRJTkFURSBDQTB2MBAGByqGSM49AgEGBSuBBAAiA2IABB6NdKgv
+U6rWCAUVIeCLunsKQNOQeSGS0+Vu1NMa3pyx8kJTdhkB/Alc2BiC9q8Vg7JS8I9y
+sLS73z4eTu915XFBRPNEszWLLcjuxyms5V1261wOnVompfb6sWohfQDQ9aNmMGQw
+EgYDVR0TAQH/BAgwBgEB/wIBADAfBgNVHSMEGDAWgBQiEqKJBM6ppy9zVIGE+K0F
+Mi1HazAdBgNVHQ4EFgQUFbQd39sNGCahd5+OX3eq1IrAldUwDgYDVR0PAQH/BAQD
+AgGGMAoGCCqGSM49BAMDA2cAMGQCMBXdKjpiudEhq7Fb/vVcb9DVuelVi2O8SENH
+kwH5aBiMe1S1VYTkP8eRtaiM6Qzh8wIwEz/oPBUFEDawCkVpg7i726Db75ZdISZ+
+IzTrUnDdv0xinWVl7/2oKs7UQx1birspMIICbTCCAfOgAwIBAgIRANBJlLz1Fa3M
+o9YlS8oV3AUwCgYIKoZIzj0EAwMwYjELMAkGA1UEBhMCVVMxDDAKBgNVBAoMA0FX
+UzEVMBMGA1UECwwMQ3J5cHRvZ3JhcGh5MQswCQYDVQQIDAJXQTEhMB8GA1UEAwwY
+U2lnbmVyIHVzLXdlc3QtMiBST09UIENBMB4XDTIxMDcxMjE5NTAzN1oXDTI2MDcx
+MjIwNTAzN1owaTELMAkGA1UEBhMCVVMxDDAKBgNVBAoMA0FXUzEVMBMGA1UECwwM
+Q3J5cHRvZ3JhcGh5MQswCQYDVQQIDAJXQTEoMCYGA1UEAwwfU2lnbmVyIHVzLXdl
+c3QtMiBTVUJPUkRJTkFURSBDQTB2MBAGByqGSM49AgEGBSuBBAAiA2IABNzE7TkM
+6mrYlE9trVdemsxNbCWXUwjM1x8mOqtZ04mL6xLPnubDeGX0C+Zx4QjH3/kspxcZ
+hAyvV2wvs8SA/HMVv1gMVwrmqtMgsNzBF7DjROPZ2aVRaNdb4DZYpVKTk6NmMGQw
+EgYDVR0TAQH/BAgwBgEB/wIBATAfBgNVHSMEGDAWgBR57Gmd9LD9Ijf7LzNGP0Gx
+CPahXzAdBgNVHQ4EFgQUIhKiiQTOqacvc1SBhPitBTItR2swDgYDVR0PAQH/BAQD
+AgGGMAoGCCqGSM49BAMDA2gAMGUCMDK5gsIf52Gh5TFT2WQDWwgLfoHlaUqGq2yv
+/TPFvJQ6PeU52DxsFkUjBK9y3D/CdwIxANHVSZ9E625jNBrq7211RBbA9FG39N8z
+dDyrvpQPuCid4fruMkuAPOLnoWOk5YpV+DCCAkQwggHKoAMCAQICEQCwD+lKFGkZ
+G4M9/Aaa0RubMAoGCCqGSM49BAMDMGIxCzAJBgNVBAYTAlVTMQwwCgYDVQQKDANB
+V1MxFTATBgNVBAsMDENyeXB0b2dyYXBoeTELMAkGA1UECAwCV0ExITAfBgNVBAMM
+GFNpZ25lciB1cy13ZXN0LTIgUk9PVCBDQTAgFw0yMDA3MTYxODIxNDdaGA8yMTIw
+MDcxNjE5MjE0N1owYjELMAkGA1UEBhMCVVMxDDAKBgNVBAoMA0FXUzEVMBMGA1UE
+CwwMQ3J5cHRvZ3JhcGh5MQswCQYDVQQIDAJXQTEhMB8GA1UEAwwYU2lnbmVyIHVz
+LXdlc3QtMiBST09UIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEZVHVljB0VcdR
+HM0iy/fmrq8iLSA4W1myRPlG7EDEXD5jwZ05J3oWceNJ9RQjHhSRBUEWu1UEhGJ8
+GSQcE0CoT2qp5qKFjBrPyRD9L3K9w/ZIumQvYsuv30zlJDPyo4Xuo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR57Gmd9LD9Ijf7LzNGP0GxCPahXzAOBgNV
+HQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMDaAAwZQIxAOZ2pyA0jXTej7akG1tz3/PQ
+dubi6A+9ZhzMx4kIWvdd/AflwCy33hvVPDoWbVG8vAIwHrwSAF/cyvDpmSnbJmll
+5gHk0spcT17Y5BEEkXSENlsajdDLje9JjGgvaUdVLqMcAAAxggKrMIICpwIBATB+
+MGkxCzAJBgNVBAYTAlVTMQwwCgYDVQQKDANBV1MxFTATBgNVBAsMDENyeXB0b2dy
+YXBoeTELMAkGA1UECAwCV0ExKDAmBgNVBAMMH1NpZ25lciB1cy1lYXN0LTEgU1VC
+T1JESU5BVEUgQ0ECEQCCx56xiLCPa/rm410jEZaMMAsGCWCGSAFlAwQCAqCCAZ8w
+GAYJKoZIhvcNAQkDMQsGCSqGSIb3DQEHATAgBgcrgTuBCQEDMRUYEzIwMzYwMTEx
+MTU1MDUzLjcwMVowIgYJKoZIhvcNAQkFMRUYEzIwMjQxMDExMTU1MDUzLjcwMVow
+KAYJKoZIhvcNAQk0MRswGTALBglghkgBZQMEAgKhCgYIKoZIzj0EAwMwPwYJKoZI
+hvcNAQkEMTIEMLjbYXVmR5oZlI5YUD9xABhD9seqVJ1fKmT3UkDiWBZK3RlBus8+
+xB7MlG/8YxEn/DBlBgcrgTuBCQECMVoMWGFybjphd3M6c2lnbmVyOnVzLWVhc3Qt
+MTo0NjQ2MjI1MzIwMTI6L3NpZ25pbmctam9icy9jODlkOTE4NC1mY2FhLTRjYzUt
+ODFmMC0zMjQyZjdmYjllMTUwawYHK4E7gQkBBDFgDF5hcm46YXdzOnNpZ25lcjp1
+cy1lYXN0LTE6NDY0NjIyNTMyMDEyOi9zaWduaW5nLXByb2ZpbGVzL0RhdGFkb2dM
+YW1iZGFTaWduaW5nUHJvZmlsZS85dk1JOVpBR0xjMAoGCCqGSM49BAMDBGYwZAIw
+M+knoFUHpSY1U+qmX1EUQCenrg4n+wc1fK5pv8K+LddOf9KHqrY28GkutH3sLF31
+AjBX0SUoW/3rEedtYJ/N9uGDlNn69Iw2ooboeBNjK9xb4QMTHsCPBR8PBFve33rq
+ADYAAAAAAAA=
+-----END PKCS7-----
diff --git a/lambdas/aws-dd-forwarder-3.127.0/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/attr/__init__.py
new file mode 100644
index 0000000..51b1c25
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/__init__.py
@@ -0,0 +1,103 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Classes Without Boilerplate
+"""
+
+from functools import partial
+from typing import Callable
+
+from . import converters, exceptions, filters, setters, validators
+from ._cmp import cmp_using
+from ._compat import Protocol
+from ._config import get_run_validators, set_run_validators
+from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types
+from ._make import (
+ NOTHING,
+ Attribute,
+ Converter,
+ Factory,
+ attrib,
+ attrs,
+ fields,
+ fields_dict,
+ make_class,
+ validate,
+)
+from ._next_gen import define, field, frozen, mutable
+from ._version_info import VersionInfo
+
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = partial(attrs, auto_attribs=True) # happy Easter ;)
+
+
+class AttrsInstance(Protocol):
+ pass
+
+
+__all__ = [
+ "Attribute",
+ "AttrsInstance",
+ "Converter",
+ "Factory",
+ "NOTHING",
+ "asdict",
+ "assoc",
+ "astuple",
+ "attr",
+ "attrib",
+ "attributes",
+ "attrs",
+ "cmp_using",
+ "converters",
+ "define",
+ "evolve",
+ "exceptions",
+ "field",
+ "fields",
+ "fields_dict",
+ "filters",
+ "frozen",
+ "get_run_validators",
+ "has",
+ "ib",
+ "make_class",
+ "mutable",
+ "resolve_types",
+ "s",
+ "set_run_validators",
+ "setters",
+ "validate",
+ "validators",
+]
+
+
+def _make_getattr(mod_name: str) -> Callable:
+ """
+ Create a metadata proxy for packaging information that uses *mod_name* in
+ its warnings and errors.
+ """
+
+ def __getattr__(name: str) -> str:
+ if name not in ("__version__", "__version_info__"):
+ msg = f"module {mod_name} has no attribute {name}"
+ raise AttributeError(msg)
+
+ try:
+ from importlib.metadata import metadata
+ except ImportError:
+ from importlib_metadata import metadata
+
+ meta = metadata("attrs")
+
+ if name == "__version_info__":
+ return VersionInfo._from_version_string(meta["version"])
+
+ return meta["version"]
+
+ return __getattr__
+
+
+__getattr__ = _make_getattr(__name__)
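
The aliases above mean the classic ``attr.s`` / ``attr.ib`` API maps onto ``attrs`` / ``attrib``, and ``__version__`` is resolved lazily through the module-level ``__getattr__`` created by ``_make_getattr`` (PEP 562). A minimal sketch of that behavior, assuming the vendored ``attr`` package and the installed ``attrs`` distribution metadata are importable:

.. code:: python

    import attr

    @attr.s  # alias of attr.attrs (classic API)
    class Point:
        x = attr.ib(default=0)  # alias of attr.attrib
        y = attr.ib(default=0)

    print(Point(1, 2))       # Point(x=1, y=2)
    print(attr.__version__)  # looked up lazily from the attrs dist metadata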
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/__init__.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/__init__.pyi
new file mode 100644
index 0000000..6ae0a83
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/__init__.pyi
@@ -0,0 +1,388 @@
+import enum
+import sys
+
+from typing import (
+ Any,
+ Callable,
+ Generic,
+ Mapping,
+ Protocol,
+ Sequence,
+    TypeVar,
+    Union,
+    overload,
+)
+
+# `import X as X` is required to make these public
+from . import converters as converters
+from . import exceptions as exceptions
+from . import filters as filters
+from . import setters as setters
+from . import validators as validators
+from ._cmp import cmp_using as cmp_using
+from ._typing_compat import AttrsInstance_
+from ._version_info import VersionInfo
+from attrs import (
+ define as define,
+ field as field,
+ mutable as mutable,
+ frozen as frozen,
+ _EqOrderType,
+ _ValidatorType,
+ _ConverterType,
+ _ReprArgType,
+ _OnSetAttrType,
+ _OnSetAttrArgType,
+ _FieldTransformer,
+ _ValidatorArgType,
+)
+
+if sys.version_info >= (3, 10):
+ from typing import TypeGuard
+else:
+ from typing_extensions import TypeGuard
+
+if sys.version_info >= (3, 11):
+ from typing import dataclass_transform
+else:
+ from typing_extensions import dataclass_transform
+
+__version__: str
+__version_info__: VersionInfo
+__title__: str
+__description__: str
+__url__: str
+__uri__: str
+__author__: str
+__email__: str
+__license__: str
+__copyright__: str
+
+_T = TypeVar("_T")
+_C = TypeVar("_C", bound=type)
+
+_FilterType = Callable[["Attribute[_T]", _T], bool]
+
+# We subclass this here to keep the protocol's qualified name clean.
+class AttrsInstance(AttrsInstance_, Protocol):
+ pass
+
+_A = TypeVar("_A", bound=type[AttrsInstance])
+
+class _Nothing(enum.Enum):
+ NOTHING = enum.auto()
+
+NOTHING = _Nothing.NOTHING
+
+# NOTE: Factory lies about its return type to make this possible:
+# `x: List[int] # = Factory(list)`
+# Work around mypy issue #4554 in the common case by using an overload.
+if sys.version_info >= (3, 8):
+ from typing import Literal
+ @overload
+ def Factory(factory: Callable[[], _T]) -> _T: ...
+ @overload
+ def Factory(
+ factory: Callable[[Any], _T],
+ takes_self: Literal[True],
+ ) -> _T: ...
+ @overload
+ def Factory(
+ factory: Callable[[], _T],
+ takes_self: Literal[False],
+ ) -> _T: ...
+
+else:
+ @overload
+ def Factory(factory: Callable[[], _T]) -> _T: ...
+ @overload
+ def Factory(
+ factory: Union[Callable[[Any], _T], Callable[[], _T]],
+ takes_self: bool = ...,
+ ) -> _T: ...
+
+In = TypeVar("In")
+Out = TypeVar("Out")
+
+class Converter(Generic[In, Out]):
+ @overload
+ def __init__(self, converter: Callable[[In], Out]) -> None: ...
+ @overload
+ def __init__(
+ self,
+ converter: Callable[[In, AttrsInstance, Attribute], Out],
+ *,
+ takes_self: Literal[True],
+ takes_field: Literal[True],
+ ) -> None: ...
+ @overload
+ def __init__(
+ self,
+ converter: Callable[[In, Attribute], Out],
+ *,
+ takes_field: Literal[True],
+ ) -> None: ...
+ @overload
+ def __init__(
+ self,
+ converter: Callable[[In, AttrsInstance], Out],
+ *,
+ takes_self: Literal[True],
+ ) -> None: ...
+
+class Attribute(Generic[_T]):
+ name: str
+ default: _T | None
+ validator: _ValidatorType[_T] | None
+ repr: _ReprArgType
+ cmp: _EqOrderType
+ eq: _EqOrderType
+ order: _EqOrderType
+ hash: bool | None
+ init: bool
+ converter: _ConverterType | Converter[Any, _T] | None
+ metadata: dict[Any, Any]
+ type: type[_T] | None
+ kw_only: bool
+ on_setattr: _OnSetAttrType
+ alias: str | None
+
+ def evolve(self, **changes: Any) -> "Attribute[Any]": ...
+
+# NOTE: We had several choices for the annotation to use for type arg:
+# 1) Type[_T]
+# - Pros: Handles simple cases correctly
+# - Cons: Might produce less informative errors in the case of conflicting
+# TypeVars e.g. `attr.ib(default='bad', type=int)`
+# 2) Callable[..., _T]
+# - Pros: Better error messages than #1 for conflicting TypeVars
+# - Cons: Terrible error messages for validator checks.
+# e.g. attr.ib(type=int, validator=validate_str)
+# -> error: Cannot infer function type argument
+# 3) type (and do all of the work in the mypy plugin)
+# - Pros: Simple here, and we could customize the plugin with our own errors.
+# - Cons: Would need to write mypy plugin code to handle all the cases.
+# We chose option #1.
+
+# `attr` lies about its return type to make the following possible:
+# attr() -> Any
+# attr(8) -> int
+# attr(validator=) -> Whatever the callable expects.
+# This makes this type of assignments possible:
+# x: int = attr(8)
+#
+# This form catches explicit None or no default but with no other arguments
+# returns Any.
+@overload
+def attrib(
+ default: None = ...,
+ validator: None = ...,
+ repr: _ReprArgType = ...,
+ cmp: _EqOrderType | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ type: None = ...,
+ converter: None = ...,
+ factory: None = ...,
+ kw_only: bool = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def attrib(
+ default: None = ...,
+ validator: _ValidatorArgType[_T] | None = ...,
+ repr: _ReprArgType = ...,
+ cmp: _EqOrderType | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ type: type[_T] | None = ...,
+ converter: _ConverterType | Converter[Any, _T] | None = ...,
+ factory: Callable[[], _T] | None = ...,
+ kw_only: bool = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def attrib(
+ default: _T,
+ validator: _ValidatorArgType[_T] | None = ...,
+ repr: _ReprArgType = ...,
+ cmp: _EqOrderType | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ type: type[_T] | None = ...,
+ converter: _ConverterType | Converter[Any, _T] | None = ...,
+ factory: Callable[[], _T] | None = ...,
+ kw_only: bool = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def attrib(
+ default: _T | None = ...,
+ validator: _ValidatorArgType[_T] | None = ...,
+ repr: _ReprArgType = ...,
+ cmp: _EqOrderType | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ type: object = ...,
+ converter: _ConverterType | Converter[Any, _T] | None = ...,
+ factory: Callable[[], _T] | None = ...,
+ kw_only: bool = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+) -> Any: ...
+@overload
+@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
+def attrs(
+ maybe_cls: _C,
+ these: dict[str, Any] | None = ...,
+ repr_ns: str | None = ...,
+ repr: bool = ...,
+ cmp: _EqOrderType | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ auto_detect: bool = ...,
+ collect_by_mro: bool = ...,
+ getstate_setstate: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+ match_args: bool = ...,
+ unsafe_hash: bool | None = ...,
+) -> _C: ...
+@overload
+@dataclass_transform(order_default=True, field_specifiers=(attrib, field))
+def attrs(
+ maybe_cls: None = ...,
+ these: dict[str, Any] | None = ...,
+ repr_ns: str | None = ...,
+ repr: bool = ...,
+ cmp: _EqOrderType | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ auto_detect: bool = ...,
+ collect_by_mro: bool = ...,
+ getstate_setstate: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+ match_args: bool = ...,
+ unsafe_hash: bool | None = ...,
+) -> Callable[[_C], _C]: ...
+def fields(cls: type[AttrsInstance]) -> Any: ...
+def fields_dict(cls: type[AttrsInstance]) -> dict[str, Attribute[Any]]: ...
+def validate(inst: AttrsInstance) -> None: ...
+def resolve_types(
+ cls: _A,
+ globalns: dict[str, Any] | None = ...,
+ localns: dict[str, Any] | None = ...,
+ attribs: list[Attribute[Any]] | None = ...,
+ include_extras: bool = ...,
+) -> _A: ...
+
+# TODO: add support for returning a proper attrs class from the mypy plugin
+# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
+# [attr.ib()])` is valid
+def make_class(
+ name: str,
+ attrs: list[str] | tuple[str, ...] | dict[str, Any],
+ bases: tuple[type, ...] = ...,
+ class_body: dict[str, Any] | None = ...,
+ repr_ns: str | None = ...,
+ repr: bool = ...,
+ cmp: _EqOrderType | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ collect_by_mro: bool = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+) -> type: ...
+
+# _funcs --
+
+# TODO: add support for returning TypedDict from the mypy plugin
+# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
+# these:
+# https://github.com/python/mypy/issues/4236
+# https://github.com/python/typing/issues/253
+# XXX: remember to fix attrs.asdict/astuple too!
+def asdict(
+ inst: AttrsInstance,
+ recurse: bool = ...,
+ filter: _FilterType[Any] | None = ...,
+ dict_factory: type[Mapping[Any, Any]] = ...,
+ retain_collection_types: bool = ...,
+ value_serializer: Callable[[type, Attribute[Any], Any], Any] | None = ...,
+ tuple_keys: bool | None = ...,
+) -> dict[str, Any]: ...
+
+# TODO: add support for returning NamedTuple from the mypy plugin
+def astuple(
+ inst: AttrsInstance,
+ recurse: bool = ...,
+ filter: _FilterType[Any] | None = ...,
+ tuple_factory: type[Sequence[Any]] = ...,
+ retain_collection_types: bool = ...,
+) -> tuple[Any, ...]: ...
+def has(cls: type) -> TypeGuard[type[AttrsInstance]]: ...
+def assoc(inst: _T, **changes: Any) -> _T: ...
+def evolve(inst: _T, **changes: Any) -> _T: ...
+
+# _config --
+
+def set_run_validators(run: bool) -> None: ...
+def get_run_validators() -> bool: ...
+
+# aliases --
+
+s = attributes = attrs
+ib = attr = attrib
+dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)
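
To illustrate the NOTE above about ``Factory`` lying about its return type: a field annotated with its element type can be assigned ``Factory(...)`` directly, and ``takes_self=True`` hands the factory the partially initialized instance. The class and field names below are invented for the sketch:

.. code:: python

    import attr

    @attr.s
    class Config:
        tags = attr.ib(default=attr.Factory(list))  # a fresh list per instance
        label = attr.ib(
            default=attr.Factory(lambda self: f"cfg-{len(self.tags)}", takes_self=True)
        )

    a, b = Config(), Config()
    assert a.tags is not b.tags  # no shared mutable default
    assert a.label == "cfg-0"    # the factory saw the instance being built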
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_cmp.py b/lambdas/aws-dd-forwarder-3.127.0/attr/_cmp.py
new file mode 100644
index 0000000..f367bb3
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_cmp.py
@@ -0,0 +1,160 @@
+# SPDX-License-Identifier: MIT
+
+
+import functools
+import types
+
+from ._make import _make_ne
+
+
+_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="}
+
+
+def cmp_using(
+ eq=None,
+ lt=None,
+ le=None,
+ gt=None,
+ ge=None,
+ require_same_type=True,
+ class_name="Comparable",
+):
+ """
+ Create a class that can be passed into `attrs.field`'s ``eq``, ``order``,
+ and ``cmp`` arguments to customize field comparison.
+
+ The resulting class will have a full set of ordering methods if at least
+ one of ``{lt, le, gt, ge}`` and ``eq`` are provided.
+
+ Args:
+ eq (typing.Callable | None):
+ Callable used to evaluate equality of two objects.
+
+ lt (typing.Callable | None):
+ Callable used to evaluate whether one object is less than another
+ object.
+
+ le (typing.Callable | None):
+ Callable used to evaluate whether one object is less than or equal
+ to another object.
+
+ gt (typing.Callable | None):
+ Callable used to evaluate whether one object is greater than
+ another object.
+
+ ge (typing.Callable | None):
+ Callable used to evaluate whether one object is greater than or
+ equal to another object.
+
+ require_same_type (bool):
+ When `True`, equality and ordering methods will return
+ `NotImplemented` if objects are not of the same type.
+
+ class_name (str | None): Name of class. Defaults to "Comparable".
+
+ See `comparison` for more details.
+
+ .. versionadded:: 21.1.0
+ """
+
+ body = {
+ "__slots__": ["value"],
+ "__init__": _make_init(),
+ "_requirements": [],
+ "_is_comparable_to": _is_comparable_to,
+ }
+
+ # Add operations.
+ num_order_functions = 0
+ has_eq_function = False
+
+ if eq is not None:
+ has_eq_function = True
+ body["__eq__"] = _make_operator("eq", eq)
+ body["__ne__"] = _make_ne()
+
+ if lt is not None:
+ num_order_functions += 1
+ body["__lt__"] = _make_operator("lt", lt)
+
+ if le is not None:
+ num_order_functions += 1
+ body["__le__"] = _make_operator("le", le)
+
+ if gt is not None:
+ num_order_functions += 1
+ body["__gt__"] = _make_operator("gt", gt)
+
+ if ge is not None:
+ num_order_functions += 1
+ body["__ge__"] = _make_operator("ge", ge)
+
+ type_ = types.new_class(
+ class_name, (object,), {}, lambda ns: ns.update(body)
+ )
+
+ # Add same type requirement.
+ if require_same_type:
+ type_._requirements.append(_check_same_type)
+
+ # Add total ordering if at least one operation was defined.
+ if 0 < num_order_functions < 4:
+ if not has_eq_function:
+            # functools.total_ordering requires __eq__ to be defined,
+            # so raise an early error here to keep a nice stack.
+            msg = "eq must be defined in order to complete ordering from lt, le, gt, ge."
+            raise ValueError(msg)
+ type_ = functools.total_ordering(type_)
+
+ return type_
+
+
+def _make_init():
+ """
+ Create __init__ method.
+ """
+
+ def __init__(self, value):
+ """
+ Initialize object with *value*.
+ """
+ self.value = value
+
+ return __init__
+
+
+def _make_operator(name, func):
+ """
+ Create operator method.
+ """
+
+ def method(self, other):
+ if not self._is_comparable_to(other):
+ return NotImplemented
+
+ result = func(self.value, other.value)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return result
+
+ method.__name__ = f"__{name}__"
+ method.__doc__ = (
+ f"Return a {_operation_names[name]} b. Computed by attrs."
+ )
+
+ return method
+
+
+def _is_comparable_to(self, other):
+ """
+ Check whether `other` is comparable to `self`.
+ """
+ return all(func(self, other) for func in self._requirements)
+
+
+def _check_same_type(self, other):
+ """
+ Return True if *self* and *other* are of the same type, False otherwise.
+ """
+ return other.value.__class__ is self.value.__class__
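
A short usage sketch for ``cmp_using``, following the pattern the attrs documentation shows (passing the generated class to a field's ``eq``); the ``Tag`` class is illustrative, not part of the diff:

.. code:: python

    import attr

    @attr.s
    class Tag:
        name = attr.ib(
            eq=attr.cmp_using(eq=lambda a, b: a.lower() == b.lower())
        )

    assert Tag("Datadog") == Tag("datadog")  # compared case-insensitively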
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_cmp.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/_cmp.pyi
new file mode 100644
index 0000000..cc7893b
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_cmp.pyi
@@ -0,0 +1,13 @@
+from typing import Any, Callable
+
+_CompareWithType = Callable[[Any, Any], bool]
+
+def cmp_using(
+ eq: _CompareWithType | None = ...,
+ lt: _CompareWithType | None = ...,
+ le: _CompareWithType | None = ...,
+ gt: _CompareWithType | None = ...,
+ ge: _CompareWithType | None = ...,
+ require_same_type: bool = ...,
+ class_name: str = ...,
+) -> type: ...
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_compat.py b/lambdas/aws-dd-forwarder-3.127.0/attr/_compat.py
new file mode 100644
index 0000000..104eeb0
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_compat.py
@@ -0,0 +1,103 @@
+# SPDX-License-Identifier: MIT
+
+import inspect
+import platform
+import sys
+import threading
+
+from collections.abc import Mapping, Sequence # noqa: F401
+from typing import _GenericAlias
+
+
+PYPY = platform.python_implementation() == "PyPy"
+PY_3_8_PLUS = sys.version_info[:2] >= (3, 8)
+PY_3_9_PLUS = sys.version_info[:2] >= (3, 9)
+PY_3_10_PLUS = sys.version_info[:2] >= (3, 10)
+PY_3_11_PLUS = sys.version_info[:2] >= (3, 11)
+PY_3_12_PLUS = sys.version_info[:2] >= (3, 12)
+PY_3_13_PLUS = sys.version_info[:2] >= (3, 13)
+PY_3_14_PLUS = sys.version_info[:2] >= (3, 14)
+
+
+if sys.version_info < (3, 8):
+ try:
+ from typing_extensions import Protocol
+ except ImportError: # pragma: no cover
+ Protocol = object
+else:
+ from typing import Protocol # noqa: F401
+
+if PY_3_14_PLUS: # pragma: no cover
+ import annotationlib
+
+ _get_annotations = annotationlib.get_annotations
+
+else:
+
+ def _get_annotations(cls):
+ """
+ Get annotations for *cls*.
+ """
+ return cls.__dict__.get("__annotations__", {})
+
+
+class _AnnotationExtractor:
+ """
+ Extract type annotations from a callable, returning None whenever there
+ is none.
+ """
+
+ __slots__ = ["sig"]
+
+ def __init__(self, callable):
+ try:
+ self.sig = inspect.signature(callable)
+ except (ValueError, TypeError): # inspect failed
+ self.sig = None
+
+ def get_first_param_type(self):
+ """
+ Return the type annotation of the first argument if it's not empty.
+ """
+ if not self.sig:
+ return None
+
+ params = list(self.sig.parameters.values())
+ if params and params[0].annotation is not inspect.Parameter.empty:
+ return params[0].annotation
+
+ return None
+
+ def get_return_type(self):
+ """
+ Return the return type if it's not empty.
+ """
+ if (
+ self.sig
+ and self.sig.return_annotation is not inspect.Signature.empty
+ ):
+ return self.sig.return_annotation
+
+ return None
+
+
+# Thread-local global to track attrs instances which are already being repr'd.
+# This is needed because there is no other (thread-safe) way to pass info
+# about the instances that are already being repr'd through the call stack
+# in order to ensure we don't perform infinite recursion.
+#
+# For instance, if an instance contains a dict which contains that instance,
+# we need to know that we're already repr'ing the outside instance from within
+# the dict's repr() call.
+#
+# This lives here rather than in _make.py so that the functions in _make.py
+# don't have a direct reference to the thread-local in their globals dict.
+# If they have such a reference, it breaks cloudpickle.
+repr_context = threading.local()
+
+
+def get_generic_base(cl):
+ """If this is a generic class (A[str]), return the generic base for it."""
+ if cl.__class__ is _GenericAlias:
+ return cl.__origin__
+ return None
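
To make the ``_AnnotationExtractor`` helper above concrete (it is private API, used by attrs to infer types from converters and validators), a small sketch with an invented converter:

.. code:: python

    from attr._compat import _AnnotationExtractor

    def to_port(value: str) -> int:
        return int(value)

    ex = _AnnotationExtractor(to_port)
    print(ex.get_first_param_type())  # <class 'str'>
    print(ex.get_return_type())       # <class 'int'>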
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_config.py b/lambdas/aws-dd-forwarder-3.127.0/attr/_config.py
new file mode 100644
index 0000000..9c245b1
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_config.py
@@ -0,0 +1,31 @@
+# SPDX-License-Identifier: MIT
+
+__all__ = ["set_run_validators", "get_run_validators"]
+
+_run_validators = True
+
+
+def set_run_validators(run):
+ """
+ Set whether or not validators are run. By default, they are run.
+
+ .. deprecated:: 21.3.0 It will not be removed, but it also will not be
+ moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
+ instead.
+ """
+ if not isinstance(run, bool):
+ msg = "'run' must be bool."
+ raise TypeError(msg)
+ global _run_validators
+ _run_validators = run
+
+
+def get_run_validators():
+ """
+ Return whether or not validators are run.
+
+ .. deprecated:: 21.3.0 It will not be removed, but it also will not be
+ moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
+ instead.
+ """
+ return _run_validators
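
A minimal sketch of the global toggle above; as the docstrings note, ``attrs.validators.set_disabled()`` and ``get_disabled()`` are the preferred modern spellings:

.. code:: python

    import attr

    @attr.s
    class Port:
        number = attr.ib(validator=attr.validators.instance_of(int))

    attr.set_run_validators(False)  # validators are now skipped globally...
    Port("not-an-int")              # ...so this does not raise
    attr.set_run_validators(True)   # re-enable before relying on validation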
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_funcs.py b/lambdas/aws-dd-forwarder-3.127.0/attr/_funcs.py
new file mode 100644
index 0000000..355cef4
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_funcs.py
@@ -0,0 +1,522 @@
+# SPDX-License-Identifier: MIT
+
+
+import copy
+
+from ._compat import PY_3_9_PLUS, get_generic_base
+from ._make import _OBJ_SETATTR, NOTHING, fields
+from .exceptions import AttrsAttributeNotFoundError
+
+
+def asdict(
+ inst,
+ recurse=True,
+ filter=None,
+ dict_factory=dict,
+ retain_collection_types=False,
+ value_serializer=None,
+):
+ """
+ Return the *attrs* attribute values of *inst* as a dict.
+
+ Optionally recurse into other *attrs*-decorated classes.
+
+ Args:
+ inst: Instance of an *attrs*-decorated class.
+
+ recurse (bool): Recurse into classes that are also *attrs*-decorated.
+
+ filter (~typing.Callable):
+ A callable whose return code determines whether an attribute or
+ element is included (`True`) or dropped (`False`). Is called with
+ the `attrs.Attribute` as the first argument and the value as the
+ second argument.
+
+ dict_factory (~typing.Callable):
+ A callable to produce dictionaries from. For example, to produce
+ ordered dictionaries instead of normal Python dictionaries, pass in
+ ``collections.OrderedDict``.
+
+ retain_collection_types (bool):
+ Do not convert to `list` when encountering an attribute whose type
+ is `tuple` or `set`. Only meaningful if *recurse* is `True`.
+
+ value_serializer (typing.Callable | None):
+ A hook that is called for every attribute or dict key/value. It
+ receives the current instance, field and value and must return the
+ (updated) value. The hook is run *after* the optional *filter* has
+ been applied.
+
+ Returns:
+ Return type of *dict_factory*.
+
+ Raises:
+ attrs.exceptions.NotAnAttrsClassError:
+ If *cls* is not an *attrs* class.
+
+ .. versionadded:: 16.0.0 *dict_factory*
+ .. versionadded:: 16.1.0 *retain_collection_types*
+ .. versionadded:: 20.3.0 *value_serializer*
+ .. versionadded:: 21.3.0
+ If a dict has a collection for a key, it is serialized as a tuple.
+ """
+ attrs = fields(inst.__class__)
+ rv = dict_factory()
+ for a in attrs:
+ v = getattr(inst, a.name)
+ if filter is not None and not filter(a, v):
+ continue
+
+ if value_serializer is not None:
+ v = value_serializer(inst, a, v)
+
+ if recurse is True:
+ if has(v.__class__):
+ rv[a.name] = asdict(
+ v,
+ recurse=True,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ elif isinstance(v, (tuple, list, set, frozenset)):
+ cf = v.__class__ if retain_collection_types is True else list
+ items = [
+ _asdict_anything(
+ i,
+ is_key=False,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ for i in v
+ ]
+ try:
+ rv[a.name] = cf(items)
+ except TypeError:
+ if not issubclass(cf, tuple):
+ raise
+                    # Workaround for TypeError: cf.__new__() missing 1 required
+                    # positional argument (which happens, for example, for a namedtuple)
+                    rv[a.name] = cf(*items)
+ elif isinstance(v, dict):
+ df = dict_factory
+ rv[a.name] = df(
+ (
+ _asdict_anything(
+ kk,
+ is_key=True,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ _asdict_anything(
+ vv,
+ is_key=False,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ )
+ for kk, vv in v.items()
+ )
+ else:
+ rv[a.name] = v
+ else:
+ rv[a.name] = v
+ return rv
+
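+# A minimal usage sketch (illustrative, not part of upstream attrs; kept as a
+# comment so this vendored module's runtime behavior is unchanged). The
+# `Point` class below is a hypothetical example:
+#
+#     import attr
+#
+#     @attr.s
+#     class Point:
+#         x = attr.ib()
+#         y = attr.ib()
+#
+#     attr.asdict(Point(1, 2))
+#     # -> {'x': 1, 'y': 2}
+#     attr.asdict(Point(1, 2), filter=lambda a, v: a.name != "y")
+#     # -> {'x': 1}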
+
+def _asdict_anything(
+ val,
+ is_key,
+ filter,
+ dict_factory,
+ retain_collection_types,
+ value_serializer,
+):
+ """
+    ``asdict`` only works on attrs instances; this works on anything.
+ """
+ if getattr(val.__class__, "__attrs_attrs__", None) is not None:
+ # Attrs class.
+ rv = asdict(
+ val,
+ recurse=True,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ elif isinstance(val, (tuple, list, set, frozenset)):
+ if retain_collection_types is True:
+ cf = val.__class__
+ elif is_key:
+ cf = tuple
+ else:
+ cf = list
+
+ rv = cf(
+ [
+ _asdict_anything(
+ i,
+ is_key=False,
+ filter=filter,
+ dict_factory=dict_factory,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ )
+ for i in val
+ ]
+ )
+ elif isinstance(val, dict):
+ df = dict_factory
+ rv = df(
+ (
+ _asdict_anything(
+ kk,
+ is_key=True,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ _asdict_anything(
+ vv,
+ is_key=False,
+ filter=filter,
+ dict_factory=df,
+ retain_collection_types=retain_collection_types,
+ value_serializer=value_serializer,
+ ),
+ )
+ for kk, vv in val.items()
+ )
+ else:
+ rv = val
+ if value_serializer is not None:
+ rv = value_serializer(None, None, rv)
+
+ return rv
+
+
+def astuple(
+ inst,
+ recurse=True,
+ filter=None,
+ tuple_factory=tuple,
+ retain_collection_types=False,
+):
+ """
+ Return the *attrs* attribute values of *inst* as a tuple.
+
+ Optionally recurse into other *attrs*-decorated classes.
+
+ Args:
+ inst: Instance of an *attrs*-decorated class.
+
+ recurse (bool):
+ Recurse into classes that are also *attrs*-decorated.
+
+ filter (~typing.Callable):
+            A callable whose return value determines whether an attribute or
+            element is included (`True`) or dropped (`False`). It is called with
+ the `attrs.Attribute` as the first argument and the value as the
+ second argument.
+
+ tuple_factory (~typing.Callable):
+            A callable to produce tuples from; for example, pass in `list` to
+            produce lists instead of tuples.
+
+ retain_collection_types (bool):
+ Do not convert to `list` or `dict` when encountering an attribute
+ which type is `tuple`, `dict` or `set`. Only meaningful if
+ *recurse* is `True`.
+
+ Returns:
+ Return type of *tuple_factory*
+
+ Raises:
+ attrs.exceptions.NotAnAttrsClassError:
+ If *cls* is not an *attrs* class.
+
+ .. versionadded:: 16.2.0
+ """
+ attrs = fields(inst.__class__)
+ rv = []
+ retain = retain_collection_types # Very long. :/
+ for a in attrs:
+ v = getattr(inst, a.name)
+ if filter is not None and not filter(a, v):
+ continue
+ if recurse is True:
+ if has(v.__class__):
+ rv.append(
+ astuple(
+ v,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ )
+ elif isinstance(v, (tuple, list, set, frozenset)):
+ cf = v.__class__ if retain is True else list
+ items = [
+ (
+ astuple(
+ j,
+ recurse=True,
+ filter=filter,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(j.__class__)
+ else j
+ )
+ for j in v
+ ]
+ try:
+ rv.append(cf(items))
+ except TypeError:
+ if not issubclass(cf, tuple):
+ raise
+                    # Workaround for TypeError: cf.__new__() missing 1 required
+                    # positional argument (which appears, for example, with a
+                    # namedtuple).
+ rv.append(cf(*items))
+ elif isinstance(v, dict):
+ df = v.__class__ if retain is True else dict
+ rv.append(
+ df(
+ (
+ (
+ astuple(
+ kk,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(kk.__class__)
+ else kk
+ ),
+ (
+ astuple(
+ vv,
+ tuple_factory=tuple_factory,
+ retain_collection_types=retain,
+ )
+ if has(vv.__class__)
+ else vv
+ ),
+ )
+ for kk, vv in v.items()
+ )
+ )
+ else:
+ rv.append(v)
+ else:
+ rv.append(v)
+
+ return rv if tuple_factory is list else tuple_factory(rv)
+
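+# A minimal usage sketch (illustrative, not part of upstream attrs); the
+# hypothetical `Point` class mirrors the `asdict` sketch above:
+#
+#     import attr
+#
+#     @attr.s
+#     class Point:
+#         x = attr.ib()
+#         y = attr.ib()
+#
+#     attr.astuple(Point(1, 2))                      # -> (1, 2)
+#     attr.astuple(Point(1, 2), tuple_factory=list)  # -> [1, 2]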
+
+def has(cls):
+ """
+ Check whether *cls* is a class with *attrs* attributes.
+
+ Args:
+ cls (type): Class to introspect.
+
+ Raises:
+ TypeError: If *cls* is not a class.
+
+ Returns:
+ bool:
+ """
+ attrs = getattr(cls, "__attrs_attrs__", None)
+ if attrs is not None:
+ return True
+
+ # No attrs, maybe it's a specialized generic (A[str])?
+ generic_base = get_generic_base(cls)
+ if generic_base is not None:
+ generic_attrs = getattr(generic_base, "__attrs_attrs__", None)
+ if generic_attrs is not None:
+ # Stick it on here for speed next time.
+ cls.__attrs_attrs__ = generic_attrs
+ return generic_attrs is not None
+ return False
+
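+# A minimal usage sketch (illustrative, not part of upstream attrs); `C` is a
+# hypothetical example class:
+#
+#     import attr
+#
+#     @attr.s
+#     class C:
+#         x = attr.ib()
+#
+#     attr.has(C)     # -> True
+#     attr.has(dict)  # -> False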
+
+def assoc(inst, **changes):
+ """
+ Copy *inst* and apply *changes*.
+
+    This is different from `evolve`, which applies the changes to the
+    arguments that create the new instance.
+
+ `evolve`'s behavior is preferable, but there are `edge cases`_ where it
+ doesn't work. Therefore `assoc` is deprecated, but will not be removed.
+
+ .. _`edge cases`: https://github.com/python-attrs/attrs/issues/251
+
+ Args:
+ inst: Instance of a class with *attrs* attributes.
+
+ changes: Keyword changes in the new copy.
+
+ Returns:
+ A copy of inst with *changes* incorporated.
+
+ Raises:
+ attrs.exceptions.AttrsAttributeNotFoundError:
+ If *attr_name* couldn't be found on *cls*.
+
+ attrs.exceptions.NotAnAttrsClassError:
+ If *cls* is not an *attrs* class.
+
+ .. deprecated:: 17.1.0
+        Use `attrs.evolve` instead if you can. This function will not be
+        removed due to the slightly different approach compared to
+        `attrs.evolve`, though.
+ """
+ new = copy.copy(inst)
+ attrs = fields(inst.__class__)
+ for k, v in changes.items():
+ a = getattr(attrs, k, NOTHING)
+ if a is NOTHING:
+ msg = f"{k} is not an attrs attribute on {new.__class__}."
+ raise AttrsAttributeNotFoundError(msg)
+ _OBJ_SETATTR(new, k, v)
+ return new
+
+
+def evolve(*args, **changes):
+ """
+ Create a new instance, based on the first positional argument with
+ *changes* applied.
+
+ Args:
+
+ inst:
+ Instance of a class with *attrs* attributes. *inst* must be passed
+ as a positional argument.
+
+ changes:
+ Keyword changes in the new copy.
+
+ Returns:
+ A copy of inst with *changes* incorporated.
+
+ Raises:
+ TypeError:
+ If *attr_name* couldn't be found in the class ``__init__``.
+
+ attrs.exceptions.NotAnAttrsClassError:
+ If *cls* is not an *attrs* class.
+
+ .. versionadded:: 17.1.0
+ .. deprecated:: 23.1.0
+ It is now deprecated to pass the instance using the keyword argument
+ *inst*. It will raise a warning until at least April 2024, after which
+ it will become an error. Always pass the instance as a positional
+ argument.
+ .. versionchanged:: 24.1.0
+ *inst* can't be passed as a keyword argument anymore.
+ """
+ try:
+ (inst,) = args
+ except ValueError:
+ msg = (
+ f"evolve() takes 1 positional argument, but {len(args)} were given"
+ )
+ raise TypeError(msg) from None
+
+ cls = inst.__class__
+ attrs = fields(cls)
+ for a in attrs:
+ if not a.init:
+ continue
+ attr_name = a.name # To deal with private attributes.
+ init_name = a.alias
+ if init_name not in changes:
+ changes[init_name] = getattr(inst, attr_name)
+
+ return cls(**changes)
+
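+# A minimal usage sketch (illustrative, not part of upstream attrs); `Point`
+# is a hypothetical example class:
+#
+#     import attr
+#
+#     @attr.s
+#     class Point:
+#         x = attr.ib()
+#         y = attr.ib()
+#
+#     p = Point(1, 2)
+#     attr.evolve(p, y=3)  # -> Point(x=1, y=3); `p` itself is unchanged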
+
+def resolve_types(
+ cls, globalns=None, localns=None, attribs=None, include_extras=True
+):
+ """
+ Resolve any strings and forward annotations in type annotations.
+
+ This is only required if you need concrete types in :class:`Attribute`'s
+ *type* field. In other words, you don't need to resolve your types if you
+ only use them for static type checking.
+
+ With no arguments, names will be looked up in the module in which the class
+ was created. If this is not what you want, for example, if the name only
+ exists inside a method, you may pass *globalns* or *localns* to specify
+ other dictionaries in which to look up these names. See the docs of
+ `typing.get_type_hints` for more details.
+
+ Args:
+ cls (type): Class to resolve.
+
+ globalns (dict | None): Dictionary containing global variables.
+
+ localns (dict | None): Dictionary containing local variables.
+
+ attribs (list | None):
+ List of attribs for the given class. This is necessary when calling
+ from inside a ``field_transformer`` since *cls* is not an *attrs*
+ class yet.
+
+ include_extras (bool):
+            Resolve more accurately, if possible. Pass ``include_extras`` to
+            ``typing.get_type_hints``, if supported by the typing module. On
+            supported Python versions (3.9+), this resolves the types more
+            accurately.
+
+ Raises:
+ TypeError: If *cls* is not a class.
+
+ attrs.exceptions.NotAnAttrsClassError:
+ If *cls* is not an *attrs* class and you didn't pass any attribs.
+
+ NameError: If types cannot be resolved because of missing variables.
+
+ Returns:
+ *cls* so you can use this function also as a class decorator. Please
+ note that you have to apply it **after** `attrs.define`. That means the
+ decorator has to come in the line **before** `attrs.define`.
+
+ .. versionadded:: 20.1.0
+ .. versionadded:: 21.1.0 *attribs*
+ .. versionadded:: 23.1.0 *include_extras*
+ """
+ # Since calling get_type_hints is expensive we cache whether we've
+ # done it already.
+ if getattr(cls, "__attrs_types_resolved__", None) != cls:
+ import typing
+
+ kwargs = {"globalns": globalns, "localns": localns}
+
+ if PY_3_9_PLUS:
+ kwargs["include_extras"] = include_extras
+
+ hints = typing.get_type_hints(cls, **kwargs)
+ for field in fields(cls) if attribs is None else attribs:
+ if field.name in hints:
+ # Since fields have been frozen we must work around it.
+ _OBJ_SETATTR(field, "type", hints[field.name])
+ # We store the class we resolved so that subclasses know they haven't
+ # been resolved.
+ cls.__attrs_types_resolved__ = cls
+
+ # Return the class so you can use it as a decorator too.
+ return cls
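+
+# A minimal usage sketch (illustrative, not part of upstream attrs); `A` is a
+# hypothetical class. String annotations stay strings until resolved:
+#
+#     import attr
+#
+#     @attr.define
+#     class A:
+#         a: "list[int]"
+#
+#     attr.fields(A).a.type   # -> 'list[int]' (still a string)
+#     attr.resolve_types(A)
+#     attr.fields(A).a.type   # -> list[int] (a real type, on Python 3.9+)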
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_make.py b/lambdas/aws-dd-forwarder-3.127.0/attr/_make.py
new file mode 100644
index 0000000..bf00c5f
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_make.py
@@ -0,0 +1,2960 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import annotations
+
+import abc
+import contextlib
+import copy
+import enum
+import functools
+import inspect
+import itertools
+import linecache
+import sys
+import types
+import typing
+
+from operator import itemgetter
+
+# We need to import _compat itself in addition to the _compat members to avoid
+# having the thread-local in the globals here.
+from . import _compat, _config, setters
+from ._compat import (
+ PY_3_8_PLUS,
+ PY_3_10_PLUS,
+ PY_3_11_PLUS,
+ _AnnotationExtractor,
+ _get_annotations,
+ get_generic_base,
+)
+from .exceptions import (
+ DefaultAlreadySetError,
+ FrozenInstanceError,
+ NotAnAttrsClassError,
+ UnannotatedAttributeError,
+)
+
+
+# This is used at least twice, so cache it here.
+_OBJ_SETATTR = object.__setattr__
+_INIT_FACTORY_PAT = "__attr_factory_%s"
+_CLASSVAR_PREFIXES = (
+ "typing.ClassVar",
+ "t.ClassVar",
+ "ClassVar",
+ "typing_extensions.ClassVar",
+)
+# we don't use a double-underscore prefix because that triggers
+# name mangling when trying to create a slot for the field
+# (when slots=True)
+_HASH_CACHE_FIELD = "_attrs_cached_hash"
+
+_EMPTY_METADATA_SINGLETON = types.MappingProxyType({})
+
+# Unique object for unequivocal getattr() defaults.
+_SENTINEL = object()
+
+_DEFAULT_ON_SETATTR = setters.pipe(setters.convert, setters.validate)
+
+
+class _Nothing(enum.Enum):
+ """
+ Sentinel to indicate the lack of a value when `None` is ambiguous.
+
+ If extending attrs, you can use ``typing.Literal[NOTHING]`` to show
+ that a value may be ``NOTHING``.
+
+ .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False.
+ .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant.
+ """
+
+ NOTHING = enum.auto()
+
+ def __repr__(self):
+ return "NOTHING"
+
+ def __bool__(self):
+ return False
+
+
+NOTHING = _Nothing.NOTHING
+"""
+Sentinel to indicate the lack of a value when `None` is ambiguous.
+"""
+
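+# A minimal usage sketch (illustrative, not part of upstream attrs): NOTHING
+# distinguishes "no value given" from an explicit None; `f` is hypothetical:
+#
+#     import attr
+#
+#     def f(x=attr.NOTHING):
+#         return "unset" if x is attr.NOTHING else x
+#
+#     f()                 # -> 'unset'
+#     f(None)             # -> None
+#     bool(attr.NOTHING)  # -> False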
+
+class _CacheHashWrapper(int):
+ """
+ An integer subclass that pickles / copies as None
+
+ This is used for non-slots classes with ``cache_hash=True``, to avoid
+ serializing a potentially (even likely) invalid hash value. Since `None`
+ is the default value for uncalculated hashes, whenever this is copied,
+ the copy's value for the hash should automatically reset.
+
+ See GH #613 for more details.
+ """
+
+ def __reduce__(self, _none_constructor=type(None), _args=()): # noqa: B008
+ return _none_constructor, _args
+
+
+def attrib(
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=None,
+ init=True,
+ metadata=None,
+ type=None,
+ converter=None,
+ factory=None,
+ kw_only=False,
+ eq=None,
+ order=None,
+ on_setattr=None,
+ alias=None,
+):
+ """
+ Create a new field / attribute on a class.
+
+ Identical to `attrs.field`, except it's not keyword-only.
+
+ Consider using `attrs.field` in new code (``attr.ib`` will *never* go away,
+ though).
+
+ .. warning::
+
+ Does **nothing** unless the class is also decorated with
+ `attr.s` (or similar)!
+
+
+ .. versionadded:: 15.2.0 *convert*
+ .. versionadded:: 16.3.0 *metadata*
+ .. versionchanged:: 17.1.0 *validator* can be a ``list`` now.
+ .. versionchanged:: 17.1.0
+ *hash* is `None` and therefore mirrors *eq* by default.
+ .. versionadded:: 17.3.0 *type*
+ .. deprecated:: 17.4.0 *convert*
+ .. versionadded:: 17.4.0
+ *converter* as a replacement for the deprecated *convert* to achieve
+ consistency with other noun-based arguments.
+ .. versionadded:: 18.1.0
+ ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``.
+ .. versionadded:: 18.2.0 *kw_only*
+ .. versionchanged:: 19.2.0 *convert* keyword argument removed.
+ .. versionchanged:: 19.2.0 *repr* also accepts a custom callable.
+ .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+ .. versionadded:: 19.2.0 *eq* and *order*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionchanged:: 20.3.0 *kw_only* backported to Python 2
+ .. versionchanged:: 21.1.0
+ *eq*, *order*, and *cmp* also accept a custom callable
+ .. versionchanged:: 21.1.0 *cmp* undeprecated
+ .. versionadded:: 22.2.0 *alias*
+ """
+ eq, eq_key, order, order_key = _determine_attrib_eq_order(
+ cmp, eq, order, True
+ )
+
+ if hash is not None and hash is not True and hash is not False:
+ msg = "Invalid value for hash. Must be True, False, or None."
+ raise TypeError(msg)
+
+ if factory is not None:
+ if default is not NOTHING:
+ msg = (
+ "The `default` and `factory` arguments are mutually exclusive."
+ )
+ raise ValueError(msg)
+ if not callable(factory):
+ msg = "The `factory` argument must be a callable."
+ raise ValueError(msg)
+ default = Factory(factory)
+
+ if metadata is None:
+ metadata = {}
+
+ # Apply syntactic sugar by auto-wrapping.
+ if isinstance(on_setattr, (list, tuple)):
+ on_setattr = setters.pipe(*on_setattr)
+
+ if validator and isinstance(validator, (list, tuple)):
+ validator = and_(*validator)
+
+ if converter and isinstance(converter, (list, tuple)):
+ converter = pipe(*converter)
+
+ return _CountingAttr(
+ default=default,
+ validator=validator,
+ repr=repr,
+ cmp=None,
+ hash=hash,
+ init=init,
+ converter=converter,
+ metadata=metadata,
+ type=type,
+ kw_only=kw_only,
+ eq=eq,
+ eq_key=eq_key,
+ order=order,
+ order_key=order_key,
+ on_setattr=on_setattr,
+ alias=alias,
+ )
+
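+# A minimal usage sketch (illustrative, not part of upstream attrs); `C` is a
+# hypothetical class showing `default` vs. `factory` (mutually exclusive):
+#
+#     import attr
+#
+#     @attr.s
+#     class C:
+#         x = attr.ib(default=42)
+#         y = attr.ib(factory=list)
+#
+#     C()     # -> C(x=42, y=[])
+#     C(x=1)  # -> C(x=1, y=[])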
+
+def _compile_and_eval(script, globs, locs=None, filename=""):
+ """
+ Evaluate the script with the given global (globs) and local (locs)
+ variables.
+ """
+ bytecode = compile(script, filename, "exec")
+ eval(bytecode, globs, locs)
+
+
+def _make_method(name, script, filename, globs, locals=None):
+ """
+ Create the method with the script given and return the method object.
+ """
+ locs = {} if locals is None else locals
+
+    # In order for debuggers like PDB to be able to step through the code,
+ # we add a fake linecache entry.
+ count = 1
+ base_filename = filename
+ while True:
+ linecache_tuple = (
+ len(script),
+ None,
+ script.splitlines(True),
+ filename,
+ )
+ old_val = linecache.cache.setdefault(filename, linecache_tuple)
+ if old_val == linecache_tuple:
+ break
+
+ filename = f"{base_filename[:-1]}-{count}>"
+ count += 1
+
+ _compile_and_eval(script, globs, locs, filename)
+
+ return locs[name]
+
+
+def _make_attr_tuple_class(cls_name, attr_names):
+ """
+ Create a tuple subclass to hold `Attribute`s for an `attrs` class.
+
+ The subclass is a bare tuple with properties for names.
+
+ class MyClassAttributes(tuple):
+ __slots__ = ()
+ x = property(itemgetter(0))
+ """
+ attr_class_name = f"{cls_name}Attributes"
+ attr_class_template = [
+ f"class {attr_class_name}(tuple):",
+ " __slots__ = ()",
+ ]
+ if attr_names:
+ for i, attr_name in enumerate(attr_names):
+ attr_class_template.append(
+ f" {attr_name} = _attrs_property(_attrs_itemgetter({i}))"
+ )
+ else:
+ attr_class_template.append(" pass")
+ globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property}
+ _compile_and_eval("\n".join(attr_class_template), globs)
+ return globs[attr_class_name]
+
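+# For a class `C` with fields "x" and "y", the generated class is equivalent
+# to the hand-written sketch below (illustrative, not part of upstream attrs):
+#
+#     from operator import itemgetter
+#
+#     class CAttributes(tuple):
+#         __slots__ = ()
+#         x = property(itemgetter(0))
+#         y = property(itemgetter(1))
+#
+#     pair = CAttributes(("first", "second"))
+#     pair.x   # -> 'first'
+#     pair[1]  # -> 'second'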
+
+# Tuple class for extracted attributes from a class definition.
+# `base_attrs` is a subset of `attrs`.
+_Attributes = _make_attr_tuple_class(
+ "_Attributes",
+ [
+ # all attributes to build dunder methods for
+ "attrs",
+ # attributes that have been inherited
+ "base_attrs",
+ # map inherited attributes to their originating classes
+ "base_attrs_map",
+ ],
+)
+
+
+def _is_class_var(annot):
+ """
+ Check whether *annot* is a typing.ClassVar.
+
+ The string comparison hack is used to avoid evaluating all string
+ annotations which would put attrs-based classes at a performance
+ disadvantage compared to plain old classes.
+ """
+ annot = str(annot)
+
+ # Annotation can be quoted.
+ if annot.startswith(("'", '"')) and annot.endswith(("'", '"')):
+ annot = annot[1:-1]
+
+ return annot.startswith(_CLASSVAR_PREFIXES)
+
+
+def _has_own_attribute(cls, attrib_name):
+ """
+ Check whether *cls* defines *attrib_name* (and doesn't just inherit it).
+ """
+ return attrib_name in cls.__dict__
+
+
+def _collect_base_attrs(cls, taken_attr_names):
+ """
+ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+ """
+ base_attrs = []
+ base_attr_map = {} # A dictionary of base attrs to their classes.
+
+ # Traverse the MRO and collect attributes.
+ for base_cls in reversed(cls.__mro__[1:-1]):
+ for a in getattr(base_cls, "__attrs_attrs__", []):
+ if a.inherited or a.name in taken_attr_names:
+ continue
+
+ a = a.evolve(inherited=True) # noqa: PLW2901
+ base_attrs.append(a)
+ base_attr_map[a.name] = base_cls
+
+    # For each name, only keep the freshest definition, i.e. the one furthest
+    # at the back. base_attr_map is fine because it gets overwritten with
+    # every new instance.
+ filtered = []
+ seen = set()
+ for a in reversed(base_attrs):
+ if a.name in seen:
+ continue
+ filtered.insert(0, a)
+ seen.add(a.name)
+
+ return filtered, base_attr_map
+
+
+def _collect_base_attrs_broken(cls, taken_attr_names):
+ """
+ Collect attr.ibs from base classes of *cls*, except *taken_attr_names*.
+
+ N.B. *taken_attr_names* will be mutated.
+
+ Adhere to the old incorrect behavior.
+
+ Notably it collects from the front and considers inherited attributes which
+ leads to the buggy behavior reported in #428.
+ """
+ base_attrs = []
+ base_attr_map = {} # A dictionary of base attrs to their classes.
+
+ # Traverse the MRO and collect attributes.
+ for base_cls in cls.__mro__[1:-1]:
+ for a in getattr(base_cls, "__attrs_attrs__", []):
+ if a.name in taken_attr_names:
+ continue
+
+ a = a.evolve(inherited=True) # noqa: PLW2901
+ taken_attr_names.add(a.name)
+ base_attrs.append(a)
+ base_attr_map[a.name] = base_cls
+
+ return base_attrs, base_attr_map
+
+
+def _transform_attrs(
+ cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer
+):
+ """
+ Transform all `_CountingAttr`s on a class into `Attribute`s.
+
+ If *these* is passed, use that and don't look for them on the class.
+
+ If *collect_by_mro* is True, collect them in the correct MRO order,
+ otherwise use the old -- incorrect -- order. See #428.
+
+ Return an `_Attributes`.
+ """
+ cd = cls.__dict__
+ anns = _get_annotations(cls)
+
+ if these is not None:
+ ca_list = list(these.items())
+ elif auto_attribs is True:
+ ca_names = {
+ name
+ for name, attr in cd.items()
+ if isinstance(attr, _CountingAttr)
+ }
+ ca_list = []
+ annot_names = set()
+ for attr_name, type in anns.items():
+ if _is_class_var(type):
+ continue
+ annot_names.add(attr_name)
+ a = cd.get(attr_name, NOTHING)
+
+ if not isinstance(a, _CountingAttr):
+ a = attrib() if a is NOTHING else attrib(default=a)
+ ca_list.append((attr_name, a))
+
+ unannotated = ca_names - annot_names
+ if len(unannotated) > 0:
+ raise UnannotatedAttributeError(
+ "The following `attr.ib`s lack a type annotation: "
+ + ", ".join(
+ sorted(unannotated, key=lambda n: cd.get(n).counter)
+ )
+ + "."
+ )
+ else:
+ ca_list = sorted(
+ (
+ (name, attr)
+ for name, attr in cd.items()
+ if isinstance(attr, _CountingAttr)
+ ),
+ key=lambda e: e[1].counter,
+ )
+
+ own_attrs = [
+ Attribute.from_counting_attr(
+ name=attr_name, ca=ca, type=anns.get(attr_name)
+ )
+ for attr_name, ca in ca_list
+ ]
+
+ if collect_by_mro:
+ base_attrs, base_attr_map = _collect_base_attrs(
+ cls, {a.name for a in own_attrs}
+ )
+ else:
+ base_attrs, base_attr_map = _collect_base_attrs_broken(
+ cls, {a.name for a in own_attrs}
+ )
+
+ if kw_only:
+ own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
+ base_attrs = [a.evolve(kw_only=True) for a in base_attrs]
+
+ attrs = base_attrs + own_attrs
+
+ # Mandatory vs non-mandatory attr order only matters when they are part of
+ # the __init__ signature and when they aren't kw_only (which are moved to
+ # the end and can be mandatory or non-mandatory in any order, as they will
+ # be specified as keyword args anyway). Check the order of those attrs:
+ had_default = False
+ for a in (a for a in attrs if a.init is not False and a.kw_only is False):
+ if had_default is True and a.default is NOTHING:
+ msg = f"No mandatory attributes allowed after an attribute with a default value or factory. Attribute in question: {a!r}"
+ raise ValueError(msg)
+
+ if had_default is False and a.default is not NOTHING:
+ had_default = True
+
+ if field_transformer is not None:
+ attrs = field_transformer(cls, attrs)
+
+ # Resolve default field alias after executing field_transformer.
+ # This allows field_transformer to differentiate between explicit vs
+ # default aliases and supply their own defaults.
+ attrs = [
+ a.evolve(alias=_default_init_alias_for(a.name)) if not a.alias else a
+ for a in attrs
+ ]
+
+ # Create AttrsClass *after* applying the field_transformer since it may
+ # add or remove attributes!
+ attr_names = [a.name for a in attrs]
+ AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)
+
+ return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map))
+
+
+def _make_cached_property_getattr(cached_properties, original_getattr, cls):
+ lines = [
+ # Wrapped to get `__class__` into closure cell for super()
+ # (It will be replaced with the newly constructed class after construction).
+ "def wrapper(_cls):",
+ " __class__ = _cls",
+ " def __getattr__(self, item, cached_properties=cached_properties, original_getattr=original_getattr, _cached_setattr_get=_cached_setattr_get):",
+ " func = cached_properties.get(item)",
+ " if func is not None:",
+ " result = func(self)",
+ " _setter = _cached_setattr_get(self)",
+ " _setter(item, result)",
+ " return result",
+ ]
+ if original_getattr is not None:
+ lines.append(
+ " return original_getattr(self, item)",
+ )
+ else:
+ lines.extend(
+ [
+ " try:",
+ " return super().__getattribute__(item)",
+ " except AttributeError:",
+ " if not hasattr(super(), '__getattr__'):",
+ " raise",
+ " return super().__getattr__(item)",
+ " original_error = f\"'{self.__class__.__name__}' object has no attribute '{item}'\"",
+ " raise AttributeError(original_error)",
+ ]
+ )
+
+ lines.extend(
+ [
+ " return __getattr__",
+ "__getattr__ = wrapper(_cls)",
+ ]
+ )
+
+ unique_filename = _generate_unique_filename(cls, "getattr")
+
+ glob = {
+ "cached_properties": cached_properties,
+ "_cached_setattr_get": _OBJ_SETATTR.__get__,
+ "original_getattr": original_getattr,
+ }
+
+ return _make_method(
+ "__getattr__",
+ "\n".join(lines),
+ unique_filename,
+ glob,
+ locals={
+ "_cls": cls,
+ },
+ )
+
+
+def _frozen_setattrs(self, name, value):
+ """
+ Attached to frozen classes as __setattr__.
+ """
+ if isinstance(self, BaseException) and name in (
+ "__cause__",
+ "__context__",
+ "__traceback__",
+ ):
+ BaseException.__setattr__(self, name, value)
+ return
+
+ raise FrozenInstanceError()
+
+
+def _frozen_delattrs(self, name):
+ """
+ Attached to frozen classes as __delattr__.
+ """
+ raise FrozenInstanceError()
+
+
+class _ClassBuilder:
+ """
+ Iteratively build *one* class.
+ """
+
+ __slots__ = (
+ "_attr_names",
+ "_attrs",
+ "_base_attr_map",
+ "_base_names",
+ "_cache_hash",
+ "_cls",
+ "_cls_dict",
+ "_delete_attribs",
+ "_frozen",
+ "_has_pre_init",
+ "_pre_init_has_args",
+ "_has_post_init",
+ "_is_exc",
+ "_on_setattr",
+ "_slots",
+ "_weakref_slot",
+ "_wrote_own_setattr",
+ "_has_custom_setattr",
+ )
+
+ def __init__(
+ self,
+ cls,
+ these,
+ slots,
+ frozen,
+ weakref_slot,
+ getstate_setstate,
+ auto_attribs,
+ kw_only,
+ cache_hash,
+ is_exc,
+ collect_by_mro,
+ on_setattr,
+ has_custom_setattr,
+ field_transformer,
+ ):
+ attrs, base_attrs, base_map = _transform_attrs(
+ cls,
+ these,
+ auto_attribs,
+ kw_only,
+ collect_by_mro,
+ field_transformer,
+ )
+
+ self._cls = cls
+ self._cls_dict = dict(cls.__dict__) if slots else {}
+ self._attrs = attrs
+ self._base_names = {a.name for a in base_attrs}
+ self._base_attr_map = base_map
+ self._attr_names = tuple(a.name for a in attrs)
+ self._slots = slots
+ self._frozen = frozen
+ self._weakref_slot = weakref_slot
+ self._cache_hash = cache_hash
+ self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False))
+ self._pre_init_has_args = False
+ if self._has_pre_init:
+ # Check if the pre init method has more arguments than just `self`
+ # We want to pass arguments if pre init expects arguments
+ pre_init_func = cls.__attrs_pre_init__
+ pre_init_signature = inspect.signature(pre_init_func)
+ self._pre_init_has_args = len(pre_init_signature.parameters) > 1
+ self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False))
+ self._delete_attribs = not bool(these)
+ self._is_exc = is_exc
+ self._on_setattr = on_setattr
+
+ self._has_custom_setattr = has_custom_setattr
+ self._wrote_own_setattr = False
+
+ self._cls_dict["__attrs_attrs__"] = self._attrs
+
+ if frozen:
+ self._cls_dict["__setattr__"] = _frozen_setattrs
+ self._cls_dict["__delattr__"] = _frozen_delattrs
+
+ self._wrote_own_setattr = True
+ elif on_setattr in (
+ _DEFAULT_ON_SETATTR,
+ setters.validate,
+ setters.convert,
+ ):
+ has_validator = has_converter = False
+ for a in attrs:
+ if a.validator is not None:
+ has_validator = True
+ if a.converter is not None:
+ has_converter = True
+
+ if has_validator and has_converter:
+ break
+ if (
+ (
+ on_setattr == _DEFAULT_ON_SETATTR
+ and not (has_validator or has_converter)
+ )
+ or (on_setattr == setters.validate and not has_validator)
+ or (on_setattr == setters.convert and not has_converter)
+ ):
+ # If class-level on_setattr is set to convert + validate, but
+ # there's no field to convert or validate, pretend like there's
+ # no on_setattr.
+ self._on_setattr = None
+
+ if getstate_setstate:
+ (
+ self._cls_dict["__getstate__"],
+ self._cls_dict["__setstate__"],
+ ) = self._make_getstate_setstate()
+
+ def __repr__(self):
+ return f"<_ClassBuilder(cls={self._cls.__name__})>"
+
+ def build_class(self):
+ """
+ Finalize class based on the accumulated configuration.
+
+ Builder cannot be used after calling this method.
+ """
+ if self._slots is True:
+ cls = self._create_slots_class()
+ else:
+ cls = self._patch_original_class()
+ if PY_3_10_PLUS:
+ cls = abc.update_abstractmethods(cls)
+
+        # The method only gets called if it's not inherited from a base class.
+ # _has_own_attribute does NOT work properly for classmethods.
+ if (
+ getattr(cls, "__attrs_init_subclass__", None)
+ and "__attrs_init_subclass__" not in cls.__dict__
+ ):
+ cls.__attrs_init_subclass__()
+
+ return cls
+
+ def _patch_original_class(self):
+ """
+ Apply accumulated methods and return the class.
+ """
+ cls = self._cls
+ base_names = self._base_names
+
+ # Clean class of attribute definitions (`attr.ib()`s).
+ if self._delete_attribs:
+ for name in self._attr_names:
+ if (
+ name not in base_names
+ and getattr(cls, name, _SENTINEL) is not _SENTINEL
+ ):
+ # An AttributeError can happen if a base class defines a
+ # class variable and we want to set an attribute with the
+ # same name by using only a type annotation.
+ with contextlib.suppress(AttributeError):
+ delattr(cls, name)
+
+ # Attach our dunder methods.
+ for name, value in self._cls_dict.items():
+ setattr(cls, name, value)
+
+ # If we've inherited an attrs __setattr__ and don't write our own,
+ # reset it to object's.
+ if not self._wrote_own_setattr and getattr(
+ cls, "__attrs_own_setattr__", False
+ ):
+ cls.__attrs_own_setattr__ = False
+
+ if not self._has_custom_setattr:
+ cls.__setattr__ = _OBJ_SETATTR
+
+ return cls
+
+ def _create_slots_class(self):
+ """
+ Build and return a new class with a `__slots__` attribute.
+ """
+ cd = {
+ k: v
+ for k, v in self._cls_dict.items()
+ if k not in (*tuple(self._attr_names), "__dict__", "__weakref__")
+ }
+
+ # If our class doesn't have its own implementation of __setattr__
+ # (either from the user or by us), check the bases, if one of them has
+ # an attrs-made __setattr__, that needs to be reset. We don't walk the
+ # MRO because we only care about our immediate base classes.
+ # XXX: This can be confused by subclassing a slotted attrs class with
+ # XXX: a non-attrs class and subclass the resulting class with an attrs
+ # XXX: class. See `test_slotted_confused` for details. For now that's
+ # XXX: OK with us.
+ if not self._wrote_own_setattr:
+ cd["__attrs_own_setattr__"] = False
+
+ if not self._has_custom_setattr:
+ for base_cls in self._cls.__bases__:
+ if base_cls.__dict__.get("__attrs_own_setattr__", False):
+ cd["__setattr__"] = _OBJ_SETATTR
+ break
+
+ # Traverse the MRO to collect existing slots
+ # and check for an existing __weakref__.
+ existing_slots = {}
+ weakref_inherited = False
+ for base_cls in self._cls.__mro__[1:-1]:
+ if base_cls.__dict__.get("__weakref__", None) is not None:
+ weakref_inherited = True
+ existing_slots.update(
+ {
+ name: getattr(base_cls, name)
+ for name in getattr(base_cls, "__slots__", [])
+ }
+ )
+
+ base_names = set(self._base_names)
+
+ names = self._attr_names
+ if (
+ self._weakref_slot
+ and "__weakref__" not in getattr(self._cls, "__slots__", ())
+ and "__weakref__" not in names
+ and not weakref_inherited
+ ):
+ names += ("__weakref__",)
+
+ if PY_3_8_PLUS:
+ cached_properties = {
+ name: cached_property.func
+ for name, cached_property in cd.items()
+ if isinstance(cached_property, functools.cached_property)
+ }
+ else:
+ # `functools.cached_property` was introduced in 3.8.
+ # So can't be used before this.
+ cached_properties = {}
+
+        # Collect methods with a `__class__` reference that are shadowed in
+        # the new class, so we know to update them.
+ additional_closure_functions_to_update = []
+ if cached_properties:
+ class_annotations = _get_annotations(self._cls)
+ for name, func in cached_properties.items():
+ # Add cached properties to names for slotting.
+ names += (name,)
+ # Clear out function from class to avoid clashing.
+ del cd[name]
+ additional_closure_functions_to_update.append(func)
+ annotation = inspect.signature(func).return_annotation
+ if annotation is not inspect.Parameter.empty:
+ class_annotations[name] = annotation
+
+ original_getattr = cd.get("__getattr__")
+ if original_getattr is not None:
+ additional_closure_functions_to_update.append(original_getattr)
+
+ cd["__getattr__"] = _make_cached_property_getattr(
+ cached_properties, original_getattr, self._cls
+ )
+
+ # We only add the names of attributes that aren't inherited.
+ # Setting __slots__ to inherited attributes wastes memory.
+ slot_names = [name for name in names if name not in base_names]
+
+        # There are slots for attributes from the current class
+        # that are defined in parent classes.
+        # As their descriptors may be overridden by a child class,
+        # we collect them here and update the class dict.
+ reused_slots = {
+ slot: slot_descriptor
+ for slot, slot_descriptor in existing_slots.items()
+ if slot in slot_names
+ }
+ slot_names = [name for name in slot_names if name not in reused_slots]
+ cd.update(reused_slots)
+ if self._cache_hash:
+ slot_names.append(_HASH_CACHE_FIELD)
+
+ cd["__slots__"] = tuple(slot_names)
+
+ cd["__qualname__"] = self._cls.__qualname__
+
+ # Create new class based on old class and our methods.
+ cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd)
+
+        # The following is a fix for methods that close over the class:
+ # If a method mentions `__class__` or uses the no-arg super(), the
+ # compiler will bake a reference to the class in the method itself
+ # as `method.__closure__`. Since we replace the class with a
+ # clone, we rewrite these references so it keeps working.
+ for item in itertools.chain(
+ cls.__dict__.values(), additional_closure_functions_to_update
+ ):
+ if isinstance(item, (classmethod, staticmethod)):
+ # Class- and staticmethods hide their functions inside.
+ # These might need to be rewritten as well.
+ closure_cells = getattr(item.__func__, "__closure__", None)
+ elif isinstance(item, property):
+ # Workaround for property `super()` shortcut (PY3-only).
+ # There is no universal way for other descriptors.
+ closure_cells = getattr(item.fget, "__closure__", None)
+ else:
+ closure_cells = getattr(item, "__closure__", None)
+
+ if not closure_cells: # Catch None or the empty list.
+ continue
+ for cell in closure_cells:
+ try:
+ match = cell.cell_contents is self._cls
+ except ValueError: # noqa: PERF203
+ # ValueError: Cell is empty
+ pass
+ else:
+ if match:
+ cell.cell_contents = cls
+ return cls
+
+ def add_repr(self, ns):
+ self._cls_dict["__repr__"] = self._add_method_dunders(
+ _make_repr(self._attrs, ns, self._cls)
+ )
+ return self
+
+ def add_str(self):
+ repr = self._cls_dict.get("__repr__")
+ if repr is None:
+ msg = "__str__ can only be generated if a __repr__ exists."
+ raise ValueError(msg)
+
+ def __str__(self):
+ return self.__repr__()
+
+ self._cls_dict["__str__"] = self._add_method_dunders(__str__)
+ return self
+
+ def _make_getstate_setstate(self):
+ """
+ Create custom __setstate__ and __getstate__ methods.
+ """
+ # __weakref__ is not writable.
+ state_attr_names = tuple(
+ an for an in self._attr_names if an != "__weakref__"
+ )
+
+ def slots_getstate(self):
+ """
+ Automatically created by attrs.
+ """
+ return {name: getattr(self, name) for name in state_attr_names}
+
+ hash_caching_enabled = self._cache_hash
+
+ def slots_setstate(self, state):
+ """
+ Automatically created by attrs.
+ """
+ __bound_setattr = _OBJ_SETATTR.__get__(self)
+ if isinstance(state, tuple):
+ # Backward compatibility with attrs instances pickled with
+ # attrs versions before v22.2.0 which stored tuples.
+ for name, value in zip(state_attr_names, state):
+ __bound_setattr(name, value)
+ else:
+ for name in state_attr_names:
+ if name in state:
+ __bound_setattr(name, state[name])
+
+ # The hash code cache is not included when the object is
+ # serialized, but it still needs to be initialized to None to
+ # indicate that the first call to __hash__ should be a cache
+ # miss.
+ if hash_caching_enabled:
+ __bound_setattr(_HASH_CACHE_FIELD, None)
+
+ return slots_getstate, slots_setstate
+
+ def make_unhashable(self):
+ self._cls_dict["__hash__"] = None
+ return self
+
+ def add_hash(self):
+ self._cls_dict["__hash__"] = self._add_method_dunders(
+ _make_hash(
+ self._cls,
+ self._attrs,
+ frozen=self._frozen,
+ cache_hash=self._cache_hash,
+ )
+ )
+
+ return self
+
+ def add_init(self):
+ self._cls_dict["__init__"] = self._add_method_dunders(
+ _make_init(
+ self._cls,
+ self._attrs,
+ self._has_pre_init,
+ self._pre_init_has_args,
+ self._has_post_init,
+ self._frozen,
+ self._slots,
+ self._cache_hash,
+ self._base_attr_map,
+ self._is_exc,
+ self._on_setattr,
+ attrs_init=False,
+ )
+ )
+
+ return self
+
+ def add_match_args(self):
+ self._cls_dict["__match_args__"] = tuple(
+ field.name
+ for field in self._attrs
+ if field.init and not field.kw_only
+ )
+
+ def add_attrs_init(self):
+ self._cls_dict["__attrs_init__"] = self._add_method_dunders(
+ _make_init(
+ self._cls,
+ self._attrs,
+ self._has_pre_init,
+ self._pre_init_has_args,
+ self._has_post_init,
+ self._frozen,
+ self._slots,
+ self._cache_hash,
+ self._base_attr_map,
+ self._is_exc,
+ self._on_setattr,
+ attrs_init=True,
+ )
+ )
+
+ return self
+
+ def add_eq(self):
+ cd = self._cls_dict
+
+ cd["__eq__"] = self._add_method_dunders(
+ _make_eq(self._cls, self._attrs)
+ )
+ cd["__ne__"] = self._add_method_dunders(_make_ne())
+
+ return self
+
+ def add_order(self):
+ cd = self._cls_dict
+
+ cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = (
+ self._add_method_dunders(meth)
+ for meth in _make_order(self._cls, self._attrs)
+ )
+
+ return self
+
+ def add_setattr(self):
+ if self._frozen:
+ return self
+
+ sa_attrs = {}
+ for a in self._attrs:
+ on_setattr = a.on_setattr or self._on_setattr
+ if on_setattr and on_setattr is not setters.NO_OP:
+ sa_attrs[a.name] = a, on_setattr
+
+ if not sa_attrs:
+ return self
+
+ if self._has_custom_setattr:
+ # We need to write a __setattr__ but there already is one!
+ msg = "Can't combine custom __setattr__ with on_setattr hooks."
+ raise ValueError(msg)
+
+ # docstring comes from _add_method_dunders
+ def __setattr__(self, name, val):
+ try:
+ a, hook = sa_attrs[name]
+ except KeyError:
+ nval = val
+ else:
+ nval = hook(self, a, val)
+
+ _OBJ_SETATTR(self, name, nval)
+
+ self._cls_dict["__attrs_own_setattr__"] = True
+ self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__)
+ self._wrote_own_setattr = True
+
+ return self
+
+ def _add_method_dunders(self, method):
+ """
+ Add __module__ and __qualname__ to a *method* if possible.
+ """
+ with contextlib.suppress(AttributeError):
+ method.__module__ = self._cls.__module__
+
+ with contextlib.suppress(AttributeError):
+ method.__qualname__ = f"{self._cls.__qualname__}.{method.__name__}"
+
+ with contextlib.suppress(AttributeError):
+ method.__doc__ = (
+ "Method generated by attrs for class "
+ f"{self._cls.__qualname__}."
+ )
+
+ return method
+
+
+def _determine_attrs_eq_order(cmp, eq, order, default_eq):
+ """
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+ values of eq and order. If *eq* is None, set it to *default_eq*.
+ """
+ if cmp is not None and any((eq is not None, order is not None)):
+        msg = "Don't mix `cmp` with `eq` and `order`."
+ raise ValueError(msg)
+
+ # cmp takes precedence due to bw-compatibility.
+ if cmp is not None:
+ return cmp, cmp
+
+ # If left None, equality is set to the specified default and ordering
+ # mirrors equality.
+ if eq is None:
+ eq = default_eq
+
+ if order is None:
+ order = eq
+
+ if eq is False and order is True:
+ msg = "`order` can only be True if `eq` is True too."
+ raise ValueError(msg)
+
+ return eq, order
+
+
+def _determine_attrib_eq_order(cmp, eq, order, default_eq):
+ """
+ Validate the combination of *cmp*, *eq*, and *order*. Derive the effective
+ values of eq and order. If *eq* is None, set it to *default_eq*.
+ """
+ if cmp is not None and any((eq is not None, order is not None)):
+        msg = "Don't mix `cmp` with `eq` and `order`."
+ raise ValueError(msg)
+
+ def decide_callable_or_boolean(value):
+ """
+ Decide whether a key function is used.
+ """
+ if callable(value):
+ value, key = True, value
+ else:
+ key = None
+ return value, key
+
+ # cmp takes precedence due to bw-compatibility.
+ if cmp is not None:
+ cmp, cmp_key = decide_callable_or_boolean(cmp)
+ return cmp, cmp_key, cmp, cmp_key
+
+ # If left None, equality is set to the specified default and ordering
+ # mirrors equality.
+ if eq is None:
+ eq, eq_key = default_eq, None
+ else:
+ eq, eq_key = decide_callable_or_boolean(eq)
+
+ if order is None:
+ order, order_key = eq, eq_key
+ else:
+ order, order_key = decide_callable_or_boolean(order)
+
+ if eq is False and order is True:
+ msg = "`order` can only be True if `eq` is True too."
+ raise ValueError(msg)
+
+ return eq, eq_key, order, order_key
+
+
+def _determine_whether_to_implement(
+ cls, flag, auto_detect, dunders, default=True
+):
+ """
+ Check whether we should implement a set of methods for *cls*.
+
+    *flag* is the argument passed into @attr.s like 'init', *auto_detect* the
+    same as passed into @attr.s, and *dunders* is a tuple of attribute names
+    whose presence signals that the user has implemented it themselves.
+
+    Return *default* if no reason either for or against is found.
+ """
+ if flag is True or flag is False:
+ return flag
+
+ if flag is None and auto_detect is False:
+ return default
+
+ # Logically, flag is None and auto_detect is True here.
+ for dunder in dunders:
+ if _has_own_attribute(cls, dunder):
+ return False
+
+ return default
+
+
+def attrs(
+ maybe_cls=None,
+ these=None,
+ repr_ns=None,
+ repr=None,
+ cmp=None,
+ hash=None,
+ init=None,
+ slots=False,
+ frozen=False,
+ weakref_slot=True,
+ str=False,
+ auto_attribs=False,
+ kw_only=False,
+ cache_hash=False,
+ auto_exc=False,
+ eq=None,
+ order=None,
+ auto_detect=False,
+ collect_by_mro=False,
+ getstate_setstate=None,
+ on_setattr=None,
+ field_transformer=None,
+ match_args=True,
+ unsafe_hash=None,
+):
+ r"""
+ A class decorator that adds :term:`dunder methods` according to the
+ specified attributes using `attr.ib` or the *these* argument.
+
+ Consider using `attrs.define` / `attrs.frozen` in new code (``attr.s`` will
+ *never* go away, though).
+
+ Args:
+ repr_ns (str):
+ When using nested classes, there was no way in Python 2 to
+            automatically detect that. This argument allows setting a custom
+ name for a more meaningful ``repr`` output. This argument is
+ pointless in Python 3 and is therefore deprecated.
+
+ .. caution::
+ Refer to `attrs.define` for the rest of the parameters, but note that they
+ can have different defaults.
+
+ Notably, leaving *on_setattr* as `None` will **not** add any hooks.
+
+ .. versionadded:: 16.0.0 *slots*
+ .. versionadded:: 16.1.0 *frozen*
+ .. versionadded:: 16.3.0 *str*
+ .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``.
+ .. versionchanged:: 17.1.0
+ *hash* supports `None` as value which is also the default now.
+ .. versionadded:: 17.3.0 *auto_attribs*
+ .. versionchanged:: 18.1.0
+ If *these* is passed, no attributes are deleted from the class body.
+ .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained.
+ .. versionadded:: 18.2.0 *weakref_slot*
+ .. deprecated:: 18.2.0
+ ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a
+ `DeprecationWarning` if the classes compared are subclasses of
+        each other. ``__eq__`` and ``__ne__`` never tried to compare subclasses
+ to each other.
+ .. versionchanged:: 19.2.0
+ ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider
+ subclasses comparable anymore.
+ .. versionadded:: 18.2.0 *kw_only*
+ .. versionadded:: 18.2.0 *cache_hash*
+ .. versionadded:: 19.1.0 *auto_exc*
+ .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01.
+ .. versionadded:: 19.2.0 *eq* and *order*
+ .. versionadded:: 20.1.0 *auto_detect*
+ .. versionadded:: 20.1.0 *collect_by_mro*
+ .. versionadded:: 20.1.0 *getstate_setstate*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionadded:: 20.3.0 *field_transformer*
+ .. versionchanged:: 21.1.0
+ ``init=False`` injects ``__attrs_init__``
+ .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__``
+ .. versionchanged:: 21.1.0 *cmp* undeprecated
+ .. versionadded:: 21.3.0 *match_args*
+ .. versionadded:: 22.2.0
+ *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
+ .. deprecated:: 24.1.0 *repr_ns*
+ .. versionchanged:: 24.1.0
+ Instances are not compared as tuples of attributes anymore, but using a
+ big ``and`` condition. This is faster and has more correct behavior for
+ uncomparable values like `math.nan`.
+ .. versionadded:: 24.1.0
+ If a class has an *inherited* classmethod called
+ ``__attrs_init_subclass__``, it is executed after the class is created.
+ .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
+ """
+ if repr_ns is not None:
+ import warnings
+
+ warnings.warn(
+ DeprecationWarning(
+ "The `repr_ns` argument is deprecated and will be removed in or after August 2025."
+ ),
+ stacklevel=2,
+ )
+
+ eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None)
+
+ # unsafe_hash takes precedence due to PEP 681.
+ if unsafe_hash is not None:
+ hash = unsafe_hash
+
+ if isinstance(on_setattr, (list, tuple)):
+ on_setattr = setters.pipe(*on_setattr)
+
+ def wrap(cls):
+ is_frozen = frozen or _has_frozen_base_class(cls)
+ is_exc = auto_exc is True and issubclass(cls, BaseException)
+ has_own_setattr = auto_detect and _has_own_attribute(
+ cls, "__setattr__"
+ )
+
+ if has_own_setattr and is_frozen:
+ msg = "Can't freeze a class with a custom __setattr__."
+ raise ValueError(msg)
+
+ builder = _ClassBuilder(
+ cls,
+ these,
+ slots,
+ is_frozen,
+ weakref_slot,
+ _determine_whether_to_implement(
+ cls,
+ getstate_setstate,
+ auto_detect,
+ ("__getstate__", "__setstate__"),
+ default=slots,
+ ),
+ auto_attribs,
+ kw_only,
+ cache_hash,
+ is_exc,
+ collect_by_mro,
+ on_setattr,
+ has_own_setattr,
+ field_transformer,
+ )
+ if _determine_whether_to_implement(
+ cls, repr, auto_detect, ("__repr__",)
+ ):
+ builder.add_repr(repr_ns)
+ if str is True:
+ builder.add_str()
+
+ eq = _determine_whether_to_implement(
+ cls, eq_, auto_detect, ("__eq__", "__ne__")
+ )
+ if not is_exc and eq is True:
+ builder.add_eq()
+ if not is_exc and _determine_whether_to_implement(
+ cls, order_, auto_detect, ("__lt__", "__le__", "__gt__", "__ge__")
+ ):
+ builder.add_order()
+
+ builder.add_setattr()
+
+ nonlocal hash
+ if (
+ hash is None
+ and auto_detect is True
+ and _has_own_attribute(cls, "__hash__")
+ ):
+ hash = False
+
+ if hash is not True and hash is not False and hash is not None:
+            # Can't use `hash in (True, False)` because 1 == True, for example.
+ msg = "Invalid value for hash. Must be True, False, or None."
+ raise TypeError(msg)
+
+ if hash is False or (hash is None and eq is False) or is_exc:
+ # Don't do anything. Should fall back to __object__'s __hash__
+ # which is by id.
+ if cache_hash:
+ msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled."
+ raise TypeError(msg)
+ elif hash is True or (
+ hash is None and eq is True and is_frozen is True
+ ):
+ # Build a __hash__ if told so, or if it's safe.
+ builder.add_hash()
+ else:
+ # Raise TypeError on attempts to hash.
+ if cache_hash:
+ msg = "Invalid value for cache_hash. To use hash caching, hashing must be either explicitly or implicitly enabled."
+ raise TypeError(msg)
+ builder.make_unhashable()
+
+ if _determine_whether_to_implement(
+ cls, init, auto_detect, ("__init__",)
+ ):
+ builder.add_init()
+ else:
+ builder.add_attrs_init()
+ if cache_hash:
+ msg = "Invalid value for cache_hash. To use hash caching, init must be True."
+ raise TypeError(msg)
+
+ if (
+ PY_3_10_PLUS
+ and match_args
+ and not _has_own_attribute(cls, "__match_args__")
+ ):
+ builder.add_match_args()
+
+ return builder.build_class()
+
+ # maybe_cls's type depends on the usage of the decorator. It's a class
+ # if it's used as `@attrs` but `None` if used as `@attrs()`.
+ if maybe_cls is None:
+ return wrap
+
+ return wrap(maybe_cls)
+
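+# A minimal usage sketch (illustrative, not part of upstream attrs); `Point`
+# is a hypothetical class combining *frozen* and *slots*:
+#
+#     import attr
+#
+#     @attr.s(frozen=True, slots=True)
+#     class Point:
+#         x = attr.ib()
+#         y = attr.ib()
+#
+#     p = Point(1, 2)
+#     p == Point(1, 2)  # -> True, via the generated __eq__
+#     # p.x = 3 would raise attr.exceptions.FrozenInstanceError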
+
+_attrs = attrs
+"""
+Internal alias so we can use it in functions that take an argument called
+*attrs*.
+"""
+
+
+def _has_frozen_base_class(cls):
+ """
+ Check whether *cls* has a frozen ancestor by looking at its
+ __setattr__.
+ """
+ return cls.__setattr__ is _frozen_setattrs
+
+
+def _generate_unique_filename(cls, func_name):
+ """
+ Create a "filename" suitable for a function being generated.
+ """
+    return (
+        f"<attrs generated {func_name} {cls.__module__}."
+        f"{getattr(cls, '__qualname__', cls.__name__)}>"
+    )
+
+
+def _make_hash(cls, attrs, frozen, cache_hash):
+ attrs = tuple(
+ a for a in attrs if a.hash is True or (a.hash is None and a.eq is True)
+ )
+
+ tab = " "
+
+ unique_filename = _generate_unique_filename(cls, "hash")
+ type_hash = hash(unique_filename)
+ # If eq is custom generated, we need to include the functions in globs
+ globs = {}
+
+ hash_def = "def __hash__(self"
+ hash_func = "hash(("
+ closing_braces = "))"
+ if not cache_hash:
+ hash_def += "):"
+ else:
+ hash_def += ", *"
+
+ hash_def += ", _cache_wrapper=__import__('attr._make')._make._CacheHashWrapper):"
+ hash_func = "_cache_wrapper(" + hash_func
+ closing_braces += ")"
+
+ method_lines = [hash_def]
+
+ def append_hash_computation_lines(prefix, indent):
+ """
+ Generate the code for actually computing the hash code.
+        Below, this will either be returned directly or used to compute
+        a value which is then cached, depending on the value of cache_hash.
+ """
+
+ method_lines.extend(
+ [
+ indent + prefix + hash_func,
+ indent + f" {type_hash},",
+ ]
+ )
+
+ for a in attrs:
+ if a.eq_key:
+ cmp_name = f"_{a.name}_key"
+ globs[cmp_name] = a.eq_key
+ method_lines.append(
+ indent + f" {cmp_name}(self.{a.name}),"
+ )
+ else:
+ method_lines.append(indent + f" self.{a.name},")
+
+ method_lines.append(indent + " " + closing_braces)
+
+ if cache_hash:
+ method_lines.append(tab + f"if self.{_HASH_CACHE_FIELD} is None:")
+ if frozen:
+ append_hash_computation_lines(
+ f"object.__setattr__(self, '{_HASH_CACHE_FIELD}', ", tab * 2
+ )
+ method_lines.append(tab * 2 + ")") # close __setattr__
+ else:
+ append_hash_computation_lines(
+ f"self.{_HASH_CACHE_FIELD} = ", tab * 2
+ )
+ method_lines.append(tab + f"return self.{_HASH_CACHE_FIELD}")
+ else:
+ append_hash_computation_lines("return ", tab)
+
+ script = "\n".join(method_lines)
+ return _make_method("__hash__", script, unique_filename, globs)
+
+
+def _add_hash(cls, attrs):
+ """
+ Add a hash method to *cls*.
+ """
+ cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False)
+ return cls
+
+
+def _make_ne():
+ """
+ Create __ne__ method.
+ """
+
+ def __ne__(self, other):
+ """
+ Check equality and either forward a NotImplemented or
+ return the result negated.
+ """
+ result = self.__eq__(other)
+ if result is NotImplemented:
+ return NotImplemented
+
+ return not result
+
+ return __ne__
+
+
+def _make_eq(cls, attrs):
+ """
+ Create __eq__ method for *cls* with *attrs*.
+ """
+ attrs = [a for a in attrs if a.eq]
+
+ unique_filename = _generate_unique_filename(cls, "eq")
+ lines = [
+ "def __eq__(self, other):",
+ " if other.__class__ is not self.__class__:",
+ " return NotImplemented",
+ ]
+
+ # We can't just do a big self.x = other.x and... clause due to
+ # irregularities like nan == nan is false but (nan,) == (nan,) is true.
+ globs = {}
+ if attrs:
+ lines.append(" return (")
+ for a in attrs:
+ if a.eq_key:
+ cmp_name = f"_{a.name}_key"
+ # Add the key function to the global namespace
+ # of the evaluated function.
+ globs[cmp_name] = a.eq_key
+ lines.append(
+ f" {cmp_name}(self.{a.name}) == {cmp_name}(other.{a.name})"
+ )
+ else:
+ lines.append(f" self.{a.name} == other.{a.name}")
+ if a is not attrs[-1]:
+ lines[-1] = f"{lines[-1]} and"
+ lines.append(" )")
+ else:
+ lines.append(" return True")
+
+ script = "\n".join(lines)
+
+ return _make_method("__eq__", script, unique_filename, globs)
+
+
+def _make_order(cls, attrs):
+ """
+ Create ordering methods for *cls* with *attrs*.
+ """
+ attrs = [a for a in attrs if a.order]
+
+ def attrs_to_tuple(obj):
+ """
+ Save us some typing.
+ """
+ return tuple(
+ key(value) if key else value
+ for value, key in (
+ (getattr(obj, a.name), a.order_key) for a in attrs
+ )
+ )
+
+ def __lt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) < attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __le__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) <= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __gt__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) > attrs_to_tuple(other)
+
+ return NotImplemented
+
+ def __ge__(self, other):
+ """
+ Automatically created by attrs.
+ """
+ if other.__class__ is self.__class__:
+ return attrs_to_tuple(self) >= attrs_to_tuple(other)
+
+ return NotImplemented
+
+ return __lt__, __le__, __gt__, __ge__
+
+
+def _add_eq(cls, attrs=None):
+ """
+ Add equality methods to *cls* with *attrs*.
+ """
+ if attrs is None:
+ attrs = cls.__attrs_attrs__
+
+ cls.__eq__ = _make_eq(cls, attrs)
+ cls.__ne__ = _make_ne()
+
+ return cls
+
+
+def _make_repr(attrs, ns, cls):
+ unique_filename = _generate_unique_filename(cls, "repr")
+ # Figure out which attributes to include, and which function to use to
+ # format them. The a.repr value can be either bool or a custom
+ # callable.
+ attr_names_with_reprs = tuple(
+ (a.name, (repr if a.repr is True else a.repr), a.init)
+ for a in attrs
+ if a.repr is not False
+ )
+ globs = {
+ name + "_repr": r for name, r, _ in attr_names_with_reprs if r != repr
+ }
+ globs["_compat"] = _compat
+ globs["AttributeError"] = AttributeError
+ globs["NOTHING"] = NOTHING
+ attribute_fragments = []
+ for name, r, i in attr_names_with_reprs:
+ accessor = (
+ "self." + name if i else 'getattr(self, "' + name + '", NOTHING)'
+ )
+ fragment = (
+ "%s={%s!r}" % (name, accessor)
+ if r == repr
+ else "%s={%s_repr(%s)}" % (name, name, accessor)
+ )
+ attribute_fragments.append(fragment)
+ repr_fragment = ", ".join(attribute_fragments)
+
+ if ns is None:
+ cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}'
+ else:
+ cls_name_fragment = ns + ".{self.__class__.__name__}"
+
+ lines = [
+ "def __repr__(self):",
+ " try:",
+ " already_repring = _compat.repr_context.already_repring",
+ " except AttributeError:",
+ " already_repring = {id(self),}",
+ " _compat.repr_context.already_repring = already_repring",
+ " else:",
+ " if id(self) in already_repring:",
+ " return '...'",
+ " else:",
+ " already_repring.add(id(self))",
+ " try:",
+ f" return f'{cls_name_fragment}({repr_fragment})'",
+ " finally:",
+ " already_repring.remove(id(self))",
+ ]
+
+ return _make_method(
+ "__repr__", "\n".join(lines), unique_filename, globs=globs
+ )
+
+
+def _add_repr(cls, ns=None, attrs=None):
+ """
+ Add a repr method to *cls*.
+ """
+ if attrs is None:
+ attrs = cls.__attrs_attrs__
+
+ cls.__repr__ = _make_repr(attrs, ns, cls)
+ return cls
+
+
+def fields(cls):
+ """
+ Return the tuple of *attrs* attributes for a class.
+
+ The tuple also allows accessing the fields by their names (see below for
+ examples).
+
+ Args:
+ cls (type): Class to introspect.
+
+ Raises:
+ TypeError: If *cls* is not a class.
+
+ attrs.exceptions.NotAnAttrsClassError:
+ If *cls* is not an *attrs* class.
+
+ Returns:
+ tuple (with name accessors) of `attrs.Attribute`
+
+ .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
+ by name.
+ .. versionchanged:: 23.1.0 Add support for generic classes.
+ """
+ generic_base = get_generic_base(cls)
+
+ if generic_base is None and not isinstance(cls, type):
+ msg = "Passed object must be a class."
+ raise TypeError(msg)
+
+ attrs = getattr(cls, "__attrs_attrs__", None)
+
+ if attrs is None:
+ if generic_base is not None:
+ attrs = getattr(generic_base, "__attrs_attrs__", None)
+ if attrs is not None:
+ # Even though this is global state, stick it on here to speed
+ # it up. We rely on `cls` being cached for this to be
+ # efficient.
+ cls.__attrs_attrs__ = attrs
+ return attrs
+ msg = f"{cls!r} is not an attrs-decorated class."
+ raise NotAnAttrsClassError(msg)
+
+ return attrs
+
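+# A minimal usage sketch (illustrative, not part of upstream attrs); `C` is a
+# hypothetical example class:
+#
+#     import attr
+#
+#     @attr.s
+#     class C:
+#         x = attr.ib()
+#         y = attr.ib()
+#
+#     attr.fields(C)         # -> (Attribute(name='x', ...), Attribute(name='y', ...))
+#     attr.fields(C).x.name  # -> 'x', via the generated name accessor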
+
+def fields_dict(cls):
+ """
+ Return an ordered dictionary of *attrs* attributes for a class, whose keys
+ are the attribute names.
+
+ Args:
+ cls (type): Class to introspect.
+
+ Raises:
+ TypeError: If *cls* is not a class.
+
+ attrs.exceptions.NotAnAttrsClassError:
+ If *cls* is not an *attrs* class.
+
+ Returns:
+ dict[str, attrs.Attribute]: Dict of attribute name to definition
+
+ .. versionadded:: 18.1.0
+ """
+ if not isinstance(cls, type):
+ msg = "Passed object must be a class."
+ raise TypeError(msg)
+ attrs = getattr(cls, "__attrs_attrs__", None)
+ if attrs is None:
+ msg = f"{cls!r} is not an attrs-decorated class."
+ raise NotAnAttrsClassError(msg)
+ return {a.name: a for a in attrs}
+
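+# A minimal usage sketch (illustrative, not part of upstream attrs), reusing
+# the hypothetical `C` from the `fields` sketch above:
+#
+#     attr.fields_dict(C)["x"].name  # -> 'x'
+#     list(attr.fields_dict(C))      # -> ['x', 'y']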
+
+def validate(inst):
+ """
+ Validate all attributes on *inst* that have a validator.
+
+    Lets all exceptions through.
+
+ Args:
+ inst: Instance of a class with *attrs* attributes.
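+
+    A minimal sketch (classic `attr.s` does not validate on assignment, so
+    re-checking with `validate` catches the stale value):
+
+    >>> import attr
+    >>> @attr.s
+    ... class C:
+    ...     x = attr.ib(validator=attr.validators.instance_of(int))
+    >>> c = C(42)
+    >>> c.x = "42"
+    >>> validate(c)  # raises TypeError via the instance_of validator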
+ """
+ if _config._run_validators is False:
+ return
+
+ for a in fields(inst.__class__):
+ v = a.validator
+ if v is not None:
+ v(inst, a, getattr(inst, a.name))
+
+
+def _is_slot_attr(a_name, base_attr_map):
+ """
+ Check if the attribute name comes from a slot class.
+ """
+ cls = base_attr_map.get(a_name)
+ return cls and "__slots__" in cls.__dict__
+
+
+def _make_init(
+ cls,
+ attrs,
+ pre_init,
+ pre_init_has_args,
+ post_init,
+ frozen,
+ slots,
+ cache_hash,
+ base_attr_map,
+ is_exc,
+ cls_on_setattr,
+ attrs_init,
+):
+ has_cls_on_setattr = (
+ cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP
+ )
+
+ if frozen and has_cls_on_setattr:
+ msg = "Frozen classes can't use on_setattr."
+ raise ValueError(msg)
+
+ needs_cached_setattr = cache_hash or frozen
+ filtered_attrs = []
+ attr_dict = {}
+ for a in attrs:
+ if not a.init and a.default is NOTHING:
+ continue
+
+ filtered_attrs.append(a)
+ attr_dict[a.name] = a
+
+ if a.on_setattr is not None:
+ if frozen is True:
+ msg = "Frozen classes can't use on_setattr."
+ raise ValueError(msg)
+
+ needs_cached_setattr = True
+ elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP:
+ needs_cached_setattr = True
+
+ unique_filename = _generate_unique_filename(cls, "init")
+
+ script, globs, annotations = _attrs_to_init_script(
+ filtered_attrs,
+ frozen,
+ slots,
+ pre_init,
+ pre_init_has_args,
+ post_init,
+ cache_hash,
+ base_attr_map,
+ is_exc,
+ needs_cached_setattr,
+ has_cls_on_setattr,
+ "__attrs_init__" if attrs_init else "__init__",
+ )
+ if cls.__module__ in sys.modules:
+ # This makes typing.get_type_hints(CLS.__init__) resolve string types.
+ globs.update(sys.modules[cls.__module__].__dict__)
+
+ globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict})
+
+ if needs_cached_setattr:
+ # Save the lookup overhead in __init__ if we need to circumvent
+ # setattr hooks.
+ globs["_cached_setattr_get"] = _OBJ_SETATTR.__get__
+
+ init = _make_method(
+ "__attrs_init__" if attrs_init else "__init__",
+ script,
+ unique_filename,
+ globs,
+ )
+ init.__annotations__ = annotations
+
+ return init
+
+
+def _setattr(attr_name: str, value_var: str, has_on_setattr: bool) -> str:
+ """
+ Use the cached object.setattr to set *attr_name* to *value_var*.
+ """
+ return f"_setattr('{attr_name}', {value_var})"
+
+
+def _setattr_with_converter(
+ attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter
+) -> str:
+ """
+ Use the cached object.setattr to set *attr_name* to *value_var*, but run
+ its converter first.
+ """
+ return f"_setattr('{attr_name}', {converter._fmt_converter_call(attr_name, value_var)})"
+
+
+def _assign(attr_name: str, value: str, has_on_setattr: bool) -> str:
+ """
+ Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise
+ relegate to _setattr.
+ """
+ if has_on_setattr:
+ return _setattr(attr_name, value, True)
+
+ return f"self.{attr_name} = {value}"
+
+
+def _assign_with_converter(
+ attr_name: str, value_var: str, has_on_setattr: bool, converter: Converter
+) -> str:
+ """
+ Unless *attr_name* has an on_setattr hook, use normal assignment after
+ conversion. Otherwise relegate to _setattr_with_converter.
+ """
+ if has_on_setattr:
+ return _setattr_with_converter(attr_name, value_var, True, converter)
+
+ return f"self.{attr_name} = {converter._fmt_converter_call(attr_name, value_var)}"
+
+
+def _determine_setters(
+ frozen: bool, slots: bool, base_attr_map: dict[str, type]
+):
+ """
+ Determine the correct setter functions based on whether a class is frozen
+ and/or slotted.
+ """
+ if frozen is True:
+ if slots is True:
+ return (), _setattr, _setattr_with_converter
+
+ # Dict frozen classes assign directly to __dict__.
+ # But only if the attribute doesn't come from an ancestor slot
+ # class.
+ # Note _inst_dict will be used again below if cache_hash is True
+
+ def fmt_setter(
+ attr_name: str, value_var: str, has_on_setattr: bool
+ ) -> str:
+ if _is_slot_attr(attr_name, base_attr_map):
+ return _setattr(attr_name, value_var, has_on_setattr)
+
+ return f"_inst_dict['{attr_name}'] = {value_var}"
+
+ def fmt_setter_with_converter(
+ attr_name: str,
+ value_var: str,
+ has_on_setattr: bool,
+ converter: Converter,
+ ) -> str:
+ if has_on_setattr or _is_slot_attr(attr_name, base_attr_map):
+ return _setattr_with_converter(
+ attr_name, value_var, has_on_setattr, converter
+ )
+
+ return f"_inst_dict['{attr_name}'] = {converter._fmt_converter_call(attr_name, value_var)}"
+
+ return (
+ ("_inst_dict = self.__dict__",),
+ fmt_setter,
+ fmt_setter_with_converter,
+ )
+
+ # Not frozen -- we can just assign directly.
+ return (), _assign, _assign_with_converter
+
+
+def _attrs_to_init_script(
+ attrs: list[Attribute],
+ is_frozen: bool,
+ is_slotted: bool,
+ call_pre_init: bool,
+ pre_init_has_args: bool,
+ call_post_init: bool,
+ does_cache_hash: bool,
+ base_attr_map: dict[str, type],
+ is_exc: bool,
+ needs_cached_setattr: bool,
+ has_cls_on_setattr: bool,
+ method_name: str,
+) -> tuple[str, dict, dict]:
+ """
+ Return a script of an initializer for *attrs*, a dict of globals, and
+ annotations for the initializer.
+
+ The globals are required by the generated script.
+ """
+ lines = ["self.__attrs_pre_init__()"] if call_pre_init else []
+
+ if needs_cached_setattr:
+ lines.append(
+ # Circumvent the __setattr__ descriptor to save one lookup per
+ # assignment. Note _setattr will be used again below if
+ # does_cache_hash is True.
+ "_setattr = _cached_setattr_get(self)"
+ )
+
+ extra_lines, fmt_setter, fmt_setter_with_converter = _determine_setters(
+ is_frozen, is_slotted, base_attr_map
+ )
+ lines.extend(extra_lines)
+
+ args = []
+ kw_only_args = []
+ attrs_to_validate = []
+
+ # This is a dictionary of names to validator and converter callables.
+ # Injecting this into __init__ globals lets us avoid lookups.
+ names_for_globals = {}
+ annotations = {"return": None}
+
+ for a in attrs:
+ if a.validator:
+ attrs_to_validate.append(a)
+
+ attr_name = a.name
+ has_on_setattr = a.on_setattr is not None or (
+ a.on_setattr is not setters.NO_OP and has_cls_on_setattr
+ )
+ # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not
+ # explicitly provided
+ arg_name = a.alias
+
+ has_factory = isinstance(a.default, Factory)
+ maybe_self = "self" if has_factory and a.default.takes_self else ""
+
+ if a.converter and not isinstance(a.converter, Converter):
+ converter = Converter(a.converter)
+ else:
+ converter = a.converter
+
+ if a.init is False:
+ if has_factory:
+ init_factory_name = _INIT_FACTORY_PAT % (a.name,)
+ if converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name,
+ init_factory_name + f"({maybe_self})",
+ has_on_setattr,
+ converter,
+ )
+ )
+ names_for_globals[converter._get_global_name(a.name)] = (
+ converter.converter
+ )
+ else:
+ lines.append(
+ fmt_setter(
+ attr_name,
+ init_factory_name + f"({maybe_self})",
+ has_on_setattr,
+ )
+ )
+ names_for_globals[init_factory_name] = a.default.factory
+ elif converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name,
+ f"attr_dict['{attr_name}'].default",
+ has_on_setattr,
+ converter,
+ )
+ )
+ names_for_globals[converter._get_global_name(a.name)] = (
+ converter.converter
+ )
+ else:
+ lines.append(
+ fmt_setter(
+ attr_name,
+ f"attr_dict['{attr_name}'].default",
+ has_on_setattr,
+ )
+ )
+ elif a.default is not NOTHING and not has_factory:
+ arg = f"{arg_name}=attr_dict['{attr_name}'].default"
+ if a.kw_only:
+ kw_only_args.append(arg)
+ else:
+ args.append(arg)
+
+ if converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr, converter
+ )
+ )
+ names_for_globals[converter._get_global_name(a.name)] = (
+ converter.converter
+ )
+ else:
+ lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
+ elif has_factory:
+ arg = f"{arg_name}=NOTHING"
+ if a.kw_only:
+ kw_only_args.append(arg)
+ else:
+ args.append(arg)
+ lines.append(f"if {arg_name} is not NOTHING:")
+
+ init_factory_name = _INIT_FACTORY_PAT % (a.name,)
+ if converter is not None:
+ lines.append(
+ " "
+ + fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr, converter
+ )
+ )
+ lines.append("else:")
+ lines.append(
+ " "
+ + fmt_setter_with_converter(
+ attr_name,
+ init_factory_name + "(" + maybe_self + ")",
+ has_on_setattr,
+ converter,
+ )
+ )
+ names_for_globals[converter._get_global_name(a.name)] = (
+ converter.converter
+ )
+ else:
+ lines.append(
+ " " + fmt_setter(attr_name, arg_name, has_on_setattr)
+ )
+ lines.append("else:")
+ lines.append(
+ " "
+ + fmt_setter(
+ attr_name,
+ init_factory_name + "(" + maybe_self + ")",
+ has_on_setattr,
+ )
+ )
+ names_for_globals[init_factory_name] = a.default.factory
+ else:
+ if a.kw_only:
+ kw_only_args.append(arg_name)
+ else:
+ args.append(arg_name)
+
+ if converter is not None:
+ lines.append(
+ fmt_setter_with_converter(
+ attr_name, arg_name, has_on_setattr, converter
+ )
+ )
+ names_for_globals[converter._get_global_name(a.name)] = (
+ converter.converter
+ )
+ else:
+ lines.append(fmt_setter(attr_name, arg_name, has_on_setattr))
+
+ if a.init is True:
+ if a.type is not None and converter is None:
+ annotations[arg_name] = a.type
+ elif converter is not None and converter._first_param_type:
+ # Use the type from the converter if present.
+ annotations[arg_name] = converter._first_param_type
+
+ if attrs_to_validate: # we can skip this if there are no validators.
+ names_for_globals["_config"] = _config
+ lines.append("if _config._run_validators is True:")
+ for a in attrs_to_validate:
+ val_name = "__attr_validator_" + a.name
+ attr_name = "__attr_" + a.name
+ lines.append(f" {val_name}(self, {attr_name}, self.{a.name})")
+ names_for_globals[val_name] = a.validator
+ names_for_globals[attr_name] = a
+
+ if call_post_init:
+ lines.append("self.__attrs_post_init__()")
+
+ # Because this is set only after __attrs_post_init__ is called, a crash
+ # will result if post-init tries to access the hash code. This seemed
+ # preferable to setting this beforehand, in which case alteration to field
+ # values during post-init combined with post-init accessing the hash code
+ # would result in silent bugs.
+ if does_cache_hash:
+ if is_frozen:
+ if is_slotted:
+ init_hash_cache = f"_setattr('{_HASH_CACHE_FIELD}', None)"
+ else:
+ init_hash_cache = f"_inst_dict['{_HASH_CACHE_FIELD}'] = None"
+ else:
+ init_hash_cache = f"self.{_HASH_CACHE_FIELD} = None"
+ lines.append(init_hash_cache)
+
+ # For exceptions we rely on BaseException.__init__ for proper
+ # initialization.
+ if is_exc:
+ vals = ",".join(f"self.{a.name}" for a in attrs if a.init)
+
+ lines.append(f"BaseException.__init__(self, {vals})")
+
+ args = ", ".join(args)
+ pre_init_args = args
+ if kw_only_args:
+ # leading comma & kw_only args
+ args += f"{', ' if args else ''}*, {', '.join(kw_only_args)}"
+ pre_init_kw_only_args = ", ".join(
+ [
+ f"{kw_arg_name}={kw_arg_name}"
+ # We need to remove the defaults from the kw_only_args.
+ for kw_arg_name in (kwa.split("=")[0] for kwa in kw_only_args)
+ ]
+ )
+ pre_init_args += ", " if pre_init_args else ""
+ pre_init_args += pre_init_kw_only_args
+
+ if call_pre_init and pre_init_has_args:
+        # If the pre-init method has arguments, pass the same arguments as `__init__`.
+ lines[0] = f"self.__attrs_pre_init__({pre_init_args})"
+
+    # Python versions below 3.12 don't allow backslashes inside f-string expressions.
+ NL = "\n "
+ return (
+ f"""def {method_name}(self, {args}):
+ {NL.join(lines) if lines else 'pass'}
+""",
+ names_for_globals,
+ annotations,
+ )
+
+
+def _default_init_alias_for(name: str) -> str:
+ """
+ The default __init__ parameter name for a field.
+
+    This performs private-name adjustment via leading-underscore stripping,
+ and is the default value of Attribute.alias if not provided.
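+
+    For example:
+
+    >>> _default_init_alias_for("_private")
+    'private'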
+ """
+
+ return name.lstrip("_")
+
+
+class Attribute:
+ """
+ *Read-only* representation of an attribute.
+
+ .. warning::
+
+ You should never instantiate this class yourself.
+
+    The class has *all* arguments of `attr.ib` (except for ``factory`` which is
+    only syntactic sugar for ``default=Factory(...)``) plus the following:
+
+ - ``name`` (`str`): The name of the attribute.
+ - ``alias`` (`str`): The __init__ parameter name of the attribute, after
+ any explicit overrides and default private-attribute-name handling.
+ - ``inherited`` (`bool`): Whether or not that attribute has been inherited
+ from a base class.
+ - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The
+ callables that are used for comparing and ordering objects by this
+ attribute, respectively. These are set by passing a callable to
+ `attr.ib`'s ``eq``, ``order``, or ``cmp`` arguments. See also
+      :ref:`comparison customization`.
+
+ Instances of this class are frequently used for introspection purposes
+ like:
+
+ - `fields` returns a tuple of them.
+ - Validators get them passed as the first argument.
+    - The :ref:`field transformer <transform-fields>` hook receives a list of
+ them.
+ - The ``alias`` property exposes the __init__ parameter name of the field,
+ with any overrides and default private-attribute handling applied.
+
+
+ .. versionadded:: 20.1.0 *inherited*
+ .. versionadded:: 20.1.0 *on_setattr*
+ .. versionchanged:: 20.2.0 *inherited* is not taken into account for
+ equality checks and hashing anymore.
+ .. versionadded:: 21.1.0 *eq_key* and *order_key*
+ .. versionadded:: 22.2.0 *alias*
+
+ For the full version history of the fields, see `attr.ib`.
+ """
+
+ __slots__ = (
+ "name",
+ "default",
+ "validator",
+ "repr",
+ "eq",
+ "eq_key",
+ "order",
+ "order_key",
+ "hash",
+ "init",
+ "metadata",
+ "type",
+ "converter",
+ "kw_only",
+ "inherited",
+ "on_setattr",
+ "alias",
+ )
+
+ def __init__(
+ self,
+ name,
+ default,
+ validator,
+ repr,
+ cmp, # XXX: unused, remove along with other cmp code.
+ hash,
+ init,
+ inherited,
+ metadata=None,
+ type=None,
+ converter=None,
+ kw_only=False,
+ eq=None,
+ eq_key=None,
+ order=None,
+ order_key=None,
+ on_setattr=None,
+ alias=None,
+ ):
+ eq, eq_key, order, order_key = _determine_attrib_eq_order(
+ cmp, eq_key or eq, order_key or order, True
+ )
+
+ # Cache this descriptor here to speed things up later.
+ bound_setattr = _OBJ_SETATTR.__get__(self)
+
+ # Despite the big red warning, people *do* instantiate `Attribute`
+ # themselves.
+ bound_setattr("name", name)
+ bound_setattr("default", default)
+ bound_setattr("validator", validator)
+ bound_setattr("repr", repr)
+ bound_setattr("eq", eq)
+ bound_setattr("eq_key", eq_key)
+ bound_setattr("order", order)
+ bound_setattr("order_key", order_key)
+ bound_setattr("hash", hash)
+ bound_setattr("init", init)
+ bound_setattr("converter", converter)
+ bound_setattr(
+ "metadata",
+ (
+ types.MappingProxyType(dict(metadata)) # Shallow copy
+ if metadata
+ else _EMPTY_METADATA_SINGLETON
+ ),
+ )
+ bound_setattr("type", type)
+ bound_setattr("kw_only", kw_only)
+ bound_setattr("inherited", inherited)
+ bound_setattr("on_setattr", on_setattr)
+ bound_setattr("alias", alias)
+
+ def __setattr__(self, name, value):
+ raise FrozenInstanceError()
+
+ @classmethod
+ def from_counting_attr(cls, name, ca, type=None):
+        # type holds the annotated value. Deal with conflicts:
+ if type is None:
+ type = ca.type
+ elif ca.type is not None:
+ msg = "Type annotation and type argument cannot both be present"
+ raise ValueError(msg)
+ inst_dict = {
+ k: getattr(ca, k)
+ for k in Attribute.__slots__
+ if k
+ not in (
+ "name",
+ "validator",
+ "default",
+ "type",
+ "inherited",
+ ) # exclude methods and deprecated alias
+ }
+ return cls(
+ name=name,
+ validator=ca._validator,
+ default=ca._default,
+ type=type,
+ cmp=None,
+ inherited=False,
+ **inst_dict,
+ )
+
+ # Don't use attrs.evolve since fields(Attribute) doesn't work
+ def evolve(self, **changes):
+ """
+ Copy *self* and apply *changes*.
+
+ This works similarly to `attrs.evolve` but that function does not work
+ with {class}`Attribute`.
+
+ It is mainly meant to be used for `transform-fields`.
+
+ .. versionadded:: 20.3.0
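+
+        A short sketch (``attribute`` stands in for any existing
+        `attrs.Attribute` instance):
+
+        >>> attribute.evolve(repr=False).repr  # doctest: +SKIP
+        False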
+ """
+ new = copy.copy(self)
+
+ new._setattrs(changes.items())
+
+ return new
+
+ # Don't use _add_pickle since fields(Attribute) doesn't work
+ def __getstate__(self):
+ """
+ Play nice with pickle.
+ """
+ return tuple(
+ getattr(self, name) if name != "metadata" else dict(self.metadata)
+ for name in self.__slots__
+ )
+
+ def __setstate__(self, state):
+ """
+ Play nice with pickle.
+ """
+ self._setattrs(zip(self.__slots__, state))
+
+ def _setattrs(self, name_values_pairs):
+ bound_setattr = _OBJ_SETATTR.__get__(self)
+ for name, value in name_values_pairs:
+ if name != "metadata":
+ bound_setattr(name, value)
+ else:
+ bound_setattr(
+ name,
+ (
+ types.MappingProxyType(dict(value))
+ if value
+ else _EMPTY_METADATA_SINGLETON
+ ),
+ )
+
+
+_a = [
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=False,
+ hash=(name != "metadata"),
+ init=True,
+ inherited=False,
+ alias=_default_init_alias_for(name),
+ )
+ for name in Attribute.__slots__
+]
+
+Attribute = _add_hash(
+ _add_eq(
+ _add_repr(Attribute, attrs=_a),
+ attrs=[a for a in _a if a.name != "inherited"],
+ ),
+ attrs=[a for a in _a if a.hash and a.name != "inherited"],
+)
+
+
+class _CountingAttr:
+ """
+ Intermediate representation of attributes that uses a counter to preserve
+ the order in which the attributes have been defined.
+
+    *Internal* data structure of the attrs library. Running into one is most
+    likely the result of a bug, like a forgotten `@attr.s` decorator.
+ """
+
+ __slots__ = (
+ "counter",
+ "_default",
+ "repr",
+ "eq",
+ "eq_key",
+ "order",
+ "order_key",
+ "hash",
+ "init",
+ "metadata",
+ "_validator",
+ "converter",
+ "type",
+ "kw_only",
+ "on_setattr",
+ "alias",
+ )
+ __attrs_attrs__ = (
+ *tuple(
+ Attribute(
+ name=name,
+ alias=_default_init_alias_for(name),
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=True,
+ init=True,
+ kw_only=False,
+ eq=True,
+ eq_key=None,
+ order=False,
+ order_key=None,
+ inherited=False,
+ on_setattr=None,
+ )
+ for name in (
+ "counter",
+ "_default",
+ "repr",
+ "eq",
+ "order",
+ "hash",
+ "init",
+ "on_setattr",
+ "alias",
+ )
+ ),
+ Attribute(
+ name="metadata",
+ alias="metadata",
+ default=None,
+ validator=None,
+ repr=True,
+ cmp=None,
+ hash=False,
+ init=True,
+ kw_only=False,
+ eq=True,
+ eq_key=None,
+ order=False,
+ order_key=None,
+ inherited=False,
+ on_setattr=None,
+ ),
+ )
+ cls_counter = 0
+
+ def __init__(
+ self,
+ default,
+ validator,
+ repr,
+ cmp,
+ hash,
+ init,
+ converter,
+ metadata,
+ type,
+ kw_only,
+ eq,
+ eq_key,
+ order,
+ order_key,
+ on_setattr,
+ alias,
+ ):
+ _CountingAttr.cls_counter += 1
+ self.counter = _CountingAttr.cls_counter
+ self._default = default
+ self._validator = validator
+ self.converter = converter
+ self.repr = repr
+ self.eq = eq
+ self.eq_key = eq_key
+ self.order = order
+ self.order_key = order_key
+ self.hash = hash
+ self.init = init
+ self.metadata = metadata
+ self.type = type
+ self.kw_only = kw_only
+ self.on_setattr = on_setattr
+ self.alias = alias
+
+ def validator(self, meth):
+ """
+ Decorator that adds *meth* to the list of validators.
+
+ Returns *meth* unchanged.
+
+ .. versionadded:: 17.1.0
+ """
+ if self._validator is None:
+ self._validator = meth
+ else:
+ self._validator = and_(self._validator, meth)
+ return meth
+
+ def default(self, meth):
+ """
+        Decorator that allows setting the default for an attribute.
+
+ Returns *meth* unchanged.
+
+ Raises:
+ DefaultAlreadySetError: If default has been set before.
+
+ .. versionadded:: 17.1.0
+ """
+ if self._default is not NOTHING:
+ raise DefaultAlreadySetError()
+
+ self._default = Factory(meth, takes_self=True)
+
+ return meth
+
+
+_CountingAttr = _add_eq(_add_repr(_CountingAttr))
+
+
+class Factory:
+ """
+ Stores a factory callable.
+
+ If passed as the default value to `attrs.field`, the factory is used to
+ generate a new value.
+
+ Args:
+ factory (typing.Callable):
+ A callable that takes either none or exactly one mandatory
+ positional argument depending on *takes_self*.
+
+ takes_self (bool):
+ Pass the partially initialized instance that is being initialized
+ as a positional argument.
+
+ .. versionadded:: 17.1.0 *takes_self*
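+
+    A short sketch -- with a factory default, each instance gets its own
+    fresh list:
+
+    >>> import attr
+    >>> @attr.s
+    ... class C:
+    ...     x = attr.ib(default=attr.Factory(list))
+    >>> C().x is C().x
+    False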
+ """
+
+ __slots__ = ("factory", "takes_self")
+
+ def __init__(self, factory, takes_self=False):
+ self.factory = factory
+ self.takes_self = takes_self
+
+ def __getstate__(self):
+ """
+ Play nice with pickle.
+ """
+ return tuple(getattr(self, name) for name in self.__slots__)
+
+ def __setstate__(self, state):
+ """
+ Play nice with pickle.
+ """
+ for name, value in zip(self.__slots__, state):
+ setattr(self, name, value)
+
+
+_f = [
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=False,
+ hash=True,
+ init=True,
+ inherited=False,
+ )
+ for name in Factory.__slots__
+]
+
+Factory = _add_hash(_add_eq(_add_repr(Factory, attrs=_f), attrs=_f), attrs=_f)
+
+
+class Converter:
+ """
+ Stores a converter callable.
+
+ Allows for the wrapped converter to take additional arguments. The
+ arguments are passed in the order they are documented.
+
+ Args:
+ converter (Callable): A callable that converts the passed value.
+
+ takes_self (bool):
+ Pass the partially initialized instance that is being initialized
+ as a positional argument. (default: `False`)
+
+ takes_field (bool):
+ Pass the field definition (an :class:`Attribute`) into the
+ converter as a positional argument. (default: `False`)
+
+ .. versionadded:: 24.1.0
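+
+    A minimal sketch of *takes_field* -- the wrapped callable receives the
+    value first, then the field definition:
+
+    >>> def tag(value, field):
+    ...     return f"{field.name}={value}"
+    >>> conv = Converter(tag, takes_field=True)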
+ """
+
+ __slots__ = (
+ "converter",
+ "takes_self",
+ "takes_field",
+ "_first_param_type",
+ "_global_name",
+ "__call__",
+ )
+
+ def __init__(self, converter, *, takes_self=False, takes_field=False):
+ self.converter = converter
+ self.takes_self = takes_self
+ self.takes_field = takes_field
+
+ ex = _AnnotationExtractor(converter)
+ self._first_param_type = ex.get_first_param_type()
+
+ if not (self.takes_self or self.takes_field):
+ self.__call__ = lambda value, _, __: self.converter(value)
+ elif self.takes_self and not self.takes_field:
+ self.__call__ = lambda value, instance, __: self.converter(
+ value, instance
+ )
+ elif not self.takes_self and self.takes_field:
+ self.__call__ = lambda value, __, field: self.converter(
+ value, field
+ )
+ else:
+ self.__call__ = lambda value, instance, field: self.converter(
+ value, instance, field
+ )
+
+ rt = ex.get_return_type()
+ if rt is not None:
+ self.__call__.__annotations__["return"] = rt
+
+ @staticmethod
+ def _get_global_name(attr_name: str) -> str:
+ """
+ Return the name that a converter for an attribute name *attr_name*
+ would have.
+ """
+ return f"__attr_converter_{attr_name}"
+
+ def _fmt_converter_call(self, attr_name: str, value_var: str) -> str:
+ """
+ Return a string that calls the converter for an attribute name
+ *attr_name* and the value in variable named *value_var* according to
+ `self.takes_self` and `self.takes_field`.
+ """
+ if not (self.takes_self or self.takes_field):
+ return f"{self._get_global_name(attr_name)}({value_var})"
+
+ if self.takes_self and self.takes_field:
+ return f"{self._get_global_name(attr_name)}({value_var}, self, attr_dict['{attr_name}'])"
+
+ if self.takes_self:
+ return f"{self._get_global_name(attr_name)}({value_var}, self)"
+
+ return f"{self._get_global_name(attr_name)}({value_var}, attr_dict['{attr_name}'])"
+
+ def __getstate__(self):
+ """
+ Return a dict containing only converter and takes_self -- the rest gets
+ computed when loading.
+ """
+ return {
+ "converter": self.converter,
+ "takes_self": self.takes_self,
+ "takes_field": self.takes_field,
+ }
+
+ def __setstate__(self, state):
+ """
+ Load instance from state.
+ """
+ self.__init__(**state)
+
+
+_f = [
+ Attribute(
+ name=name,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ cmp=None,
+ eq=True,
+ order=False,
+ hash=True,
+ init=True,
+ inherited=False,
+ )
+ for name in ("converter", "takes_self", "takes_field")
+]
+
+Converter = _add_hash(
+ _add_eq(_add_repr(Converter, attrs=_f), attrs=_f), attrs=_f
+)
+
+
+def make_class(
+ name, attrs, bases=(object,), class_body=None, **attributes_arguments
+):
+ r"""
+ A quick way to create a new class called *name* with *attrs*.
+
+ Args:
+ name (str): The name for the new class.
+
+ attrs( list | dict):
+            A list of names or a dictionary mapping names to `attr.ib`\
+            s / `attrs.field`\ s.
+
+ The order is deduced from the order of the names or attributes
+ inside *attrs*. Otherwise the order of the definition of the
+ attributes is used.
+
+ bases (tuple[type, ...]): Classes that the new class will subclass.
+
+ class_body (dict):
+ An optional dictionary of class attributes for the new class.
+
+ attributes_arguments: Passed unmodified to `attr.s`.
+
+ Returns:
+ type: A new class with *attrs*.
+
+ .. versionadded:: 17.1.0 *bases*
+ .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained.
+ .. versionchanged:: 23.2.0 *class_body*
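+
+    For example:
+
+    >>> C1 = make_class("C1", ["x", "y"])
+    >>> C1(1, 2)
+    C1(x=1, y=2)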
+ """
+ if isinstance(attrs, dict):
+ cls_dict = attrs
+ elif isinstance(attrs, (list, tuple)):
+ cls_dict = {a: attrib() for a in attrs}
+ else:
+ msg = "attrs argument must be a dict or a list."
+ raise TypeError(msg)
+
+ pre_init = cls_dict.pop("__attrs_pre_init__", None)
+ post_init = cls_dict.pop("__attrs_post_init__", None)
+ user_init = cls_dict.pop("__init__", None)
+
+ body = {}
+ if class_body is not None:
+ body.update(class_body)
+ if pre_init is not None:
+ body["__attrs_pre_init__"] = pre_init
+ if post_init is not None:
+ body["__attrs_post_init__"] = post_init
+ if user_init is not None:
+ body["__init__"] = user_init
+
+ type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body))
+
+ # For pickling to work, the __module__ variable needs to be set to the
+ # frame where the class is created. Bypass this step in environments where
+ # sys._getframe is not defined (Jython for example) or sys._getframe is not
+ # defined for arguments greater than 0 (IronPython).
+ with contextlib.suppress(AttributeError, ValueError):
+ type_.__module__ = sys._getframe(1).f_globals.get(
+ "__name__", "__main__"
+ )
+
+ # We do it here for proper warnings with meaningful stacklevel.
+ cmp = attributes_arguments.pop("cmp", None)
+ (
+ attributes_arguments["eq"],
+ attributes_arguments["order"],
+ ) = _determine_attrs_eq_order(
+ cmp,
+ attributes_arguments.get("eq"),
+ attributes_arguments.get("order"),
+ True,
+ )
+
+ cls = _attrs(these=cls_dict, **attributes_arguments)(type_)
+ # Only add type annotations now or "_attrs()" will complain:
+ cls.__annotations__ = {
+ k: v.type for k, v in cls_dict.items() if v.type is not None
+ }
+ return cls
+
+
+# These are required within this module, so we define them here and merely
+# import them into .validators / .converters.
+
+
+@attrs(slots=True, unsafe_hash=True)
+class _AndValidator:
+ """
+ Compose many validators to a single one.
+ """
+
+ _validators = attrib()
+
+ def __call__(self, inst, attr, value):
+ for v in self._validators:
+ v(inst, attr, value)
+
+
+def and_(*validators):
+ """
+ A validator that composes multiple validators into one.
+
+ When called on a value, it runs all wrapped validators.
+
+ Args:
+ validators (~collections.abc.Iterable[typing.Callable]):
+ Arbitrary number of validators.
+
+ .. versionadded:: 17.1.0
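+
+    A small sketch composing a type check with an ad-hoc rule (``positive``
+    is a made-up validator):
+
+    >>> import attr
+    >>> def positive(inst, attribute, value):
+    ...     if value <= 0:
+    ...         raise ValueError(f"{attribute.name} must be positive")
+    >>> checked = and_(attr.validators.instance_of(int), positive)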
+ """
+ vals = []
+ for validator in validators:
+ vals.extend(
+ validator._validators
+ if isinstance(validator, _AndValidator)
+ else [validator]
+ )
+
+ return _AndValidator(tuple(vals))
+
+
+def pipe(*converters):
+ """
+ A converter that composes multiple converters into one.
+
+ When called on a value, it runs all wrapped converters, returning the
+ *last* value.
+
+ Type annotations will be inferred from the wrapped converters', if they
+ have any.
+
+    Args:
+        converters (~collections.abc.Iterable[typing.Callable]):
+            Arbitrary number of converters.
+
+ .. versionadded:: 20.1.0
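+
+    A short sketch -- coerce to `str`, then strip whitespace:
+
+    >>> import attr
+    >>> @attr.s
+    ... class C:
+    ...     x = attr.ib(converter=pipe(str, str.strip))
+    >>> C("  hi  ").x
+    'hi'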
+ """
+
+ def pipe_converter(val, inst, field):
+ for c in converters:
+ val = c(val, inst, field) if isinstance(c, Converter) else c(val)
+
+ return val
+
+ if not converters:
+ # If the converter list is empty, pipe_converter is the identity.
+ A = typing.TypeVar("A")
+ pipe_converter.__annotations__.update({"val": A, "return": A})
+ else:
+ # Get parameter type from first converter.
+ t = _AnnotationExtractor(converters[0]).get_first_param_type()
+ if t:
+ pipe_converter.__annotations__["val"] = t
+
+ last = converters[-1]
+ if not PY_3_11_PLUS and isinstance(last, Converter):
+ last = last.__call__
+
+ # Get return type from last converter.
+ rt = _AnnotationExtractor(last).get_return_type()
+ if rt:
+ pipe_converter.__annotations__["return"] = rt
+
+ return Converter(pipe_converter, takes_self=True, takes_field=True)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_next_gen.py b/lambdas/aws-dd-forwarder-3.127.0/attr/_next_gen.py
new file mode 100644
index 0000000..dbb65cc
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_next_gen.py
@@ -0,0 +1,631 @@
+# SPDX-License-Identifier: MIT
+
+"""
+These are keyword-only APIs that call `attr.s` and `attr.ib` with different
+default values.
+"""
+
+
+from functools import partial
+
+from . import setters
+from ._funcs import asdict as _asdict
+from ._funcs import astuple as _astuple
+from ._make import (
+ _DEFAULT_ON_SETATTR,
+ NOTHING,
+ _frozen_setattrs,
+ attrib,
+ attrs,
+)
+from .exceptions import UnannotatedAttributeError
+
+
+def define(
+ maybe_cls=None,
+ *,
+ these=None,
+ repr=None,
+ unsafe_hash=None,
+ hash=None,
+ init=None,
+ slots=True,
+ frozen=False,
+ weakref_slot=True,
+ str=False,
+ auto_attribs=None,
+ kw_only=False,
+ cache_hash=False,
+ auto_exc=True,
+ eq=None,
+ order=False,
+ auto_detect=True,
+ getstate_setstate=None,
+ on_setattr=None,
+ field_transformer=None,
+ match_args=True,
+):
+ r"""
+ A class decorator that adds :term:`dunder methods` according to
+ :term:`fields ` specified using :doc:`type annotations `,
+ `field()` calls, or the *these* argument.
+
+ Since *attrs* patches or replaces an existing class, you cannot use
+ `object.__init_subclass__` with *attrs* classes, because it runs too early.
+ As a replacement, you can define ``__attrs_init_subclass__`` on your class.
+ It will be called by *attrs* classes that subclass it after they're
+ created. See also :ref:`init-subclass`.
+
+ Args:
+ slots (bool):
+            Create a :term:`slotted class <slotted classes>` that's more
+ memory-efficient. Slotted classes are generally superior to the
+ default dict classes, but have some gotchas you should know about,
+            so we encourage you to read the :term:`glossary entry <slotted classes>`.
+
+ auto_detect (bool):
+ Instead of setting the *init*, *repr*, *eq*, and *hash* arguments
+ explicitly, assume they are set to True **unless any** of the
+ involved methods for one of the arguments is implemented in the
+ *current* class (meaning, it is *not* inherited from some base
+ class).
+
+ So, for example by implementing ``__eq__`` on a class yourself,
+ *attrs* will deduce ``eq=False`` and will create *neither*
+ ``__eq__`` *nor* ``__ne__`` (but Python classes come with a
+ sensible ``__ne__`` by default, so it *should* be enough to only
+ implement ``__eq__`` in most cases).
+
+            Passing True or False to *init*, *repr*, *eq*, *cmp*, or *hash*
+ overrides whatever *auto_detect* would determine.
+
+ auto_exc (bool):
+ If the class subclasses `BaseException` (which implicitly includes
+ any subclass of any exception), the following happens to behave
+ like a well-behaved Python exception class:
+
+ - the values for *eq*, *order*, and *hash* are ignored and the
+ instances compare and hash by the instance's ids [#]_ ,
+ - all attributes that are either passed into ``__init__`` or have a
+ default value are additionally available as a tuple in the
+ ``args`` attribute,
+ - the value of *str* is ignored leaving ``__str__`` to base
+ classes.
+
+ .. [#]
+ Note that *attrs* will *not* remove existing implementations of
+ ``__hash__`` or the equality methods. It just won't add own
+ ones.
+
+ on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
+ A callable that is run whenever the user attempts to set an
+ attribute (either by assignment like ``i.x = 42`` or by using
+ `setattr` like ``setattr(i, "x", 42)``). It receives the same
+ arguments as validators: the instance, the attribute that is being
+ modified, and the new value.
+
+ If no exception is raised, the attribute is set to the return value
+ of the callable.
+
+ If a list of callables is passed, they're automatically wrapped in
+ an `attrs.setters.pipe`.
+
+ If left None, the default behavior is to run converters and
+ validators whenever an attribute is set.
+
+ init (bool):
+ Create a ``__init__`` method that initializes the *attrs*
+ attributes. Leading underscores are stripped for the argument name,
+ unless an alias is set on the attribute.
+
+ .. seealso::
+ `init` shows advanced ways to customize the generated
+ ``__init__`` method, including executing code before and after.
+
+ repr(bool):
+ Create a ``__repr__`` method with a human readable representation
+ of *attrs* attributes.
+
+ str (bool):
+ Create a ``__str__`` method that is identical to ``__repr__``. This
+ is usually not necessary except for `Exception`\ s.
+
+ eq (bool | None):
+ If True or None (default), add ``__eq__`` and ``__ne__`` methods
+ that check two instances for equality.
+
+ .. seealso::
+ `comparison` describes how to customize the comparison behavior
+                going as far as comparing NumPy arrays.
+
+ order (bool | None):
+ If True, add ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__``
+ methods that behave like *eq* above and allow instances to be
+ ordered.
+
+ They compare the instances as if they were tuples of their *attrs*
+ attributes if and only if the types of both classes are
+ *identical*.
+
+            If `None`, mirror the value of *eq*.
+
+ .. seealso:: `comparison`
+
+ cmp (bool | None):
+ Setting *cmp* is equivalent to setting *eq* and *order* to the same
+ value. Must not be mixed with *eq* or *order*.
+
+ unsafe_hash (bool | None):
+            If None (default), the ``__hash__`` method is generated according
+            to how *eq* and *frozen* are set.
+
+ 1. If *both* are True, *attrs* will generate a ``__hash__`` for
+ you.
+ 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set
+ to None, marking it unhashable (which it is).
+ 3. If *eq* is False, ``__hash__`` will be left untouched meaning
+ the ``__hash__`` method of the base class will be used. If the
+ base class is `object`, this means it will fall back to id-based
+ hashing.
+
+            Although not recommended, you can decide for yourself and force
+            *attrs* to create one (for example, if the class is immutable even
+            though you didn't freeze it programmatically) by passing True or
+            False explicitly. Both of these cases are rather special and
+            should be used carefully.
+
+ .. seealso::
+
+ - Our documentation on `hashing`,
+ - Python's documentation on `object.__hash__`,
+            - and the `GitHub issue that led to the default behavior
+ `_ for more
+ details.
+
+ hash (bool | None):
+ Deprecated alias for *unsafe_hash*. *unsafe_hash* takes precedence.
+
+ cache_hash (bool):
+ Ensure that the object's hash code is computed only once and stored
+ on the object. If this is set to True, hashing must be either
+ explicitly or implicitly enabled for this class. If the hash code
+ is cached, avoid any reassignments of fields involved in hash code
+ computation or mutations of the objects those fields point to after
+ object creation. If such changes occur, the behavior of the
+ object's hash code is undefined.
+
+ frozen (bool):
+ Make instances immutable after initialization. If someone attempts
+ to modify a frozen instance, `attrs.exceptions.FrozenInstanceError`
+ is raised.
+
+ .. note::
+
+ 1. This is achieved by installing a custom ``__setattr__``
+ method on your class, so you can't implement your own.
+
+ 2. True immutability is impossible in Python.
+
+                3. This *does* have a minor runtime performance `impact
+ ` when initializing new instances. In other
+ words: ``__init__`` is slightly slower with ``frozen=True``.
+
+ 4. If a class is frozen, you cannot modify ``self`` in
+ ``__attrs_post_init__`` or a self-written ``__init__``. You
+ can circumvent that limitation by using
+ ``object.__setattr__(self, "attribute_name", value)``.
+
+ 5. Subclasses of a frozen class are frozen too.
+
+ kw_only (bool):
+ Make all attributes keyword-only in the generated ``__init__`` (if
+ *init* is False, this parameter is ignored).
+
+ weakref_slot (bool):
+ Make instances weak-referenceable. This has no effect unless
+ *slots* is True.
+
+ field_transformer (~typing.Callable | None):
+ A function that is called with the original class object and all
+ fields right before *attrs* finalizes the class. You can use this,
+ for example, to automatically add converters or validators to
+ fields based on their types.
+
+ .. seealso:: `transform-fields`
+
+ match_args (bool):
+ If True (default), set ``__match_args__`` on the class to support
+ :pep:`634` (*Structural Pattern Matching*). It is a tuple of all
+ non-keyword-only ``__init__`` parameter names on Python 3.10 and
+ later. Ignored on older Python versions.
+
+ collect_by_mro (bool):
+ If True, *attrs* collects attributes from base classes correctly
+ according to the `method resolution order
+ `_. If False, *attrs*
+ will mimic the (wrong) behavior of `dataclasses` and :pep:`681`.
+
+ See also `issue #428
+ `_.
+
+ getstate_setstate (bool | None):
+ .. note::
+
+ This is usually only interesting for slotted classes and you
+ should probably just set *auto_detect* to True.
+
+ If True, ``__getstate__`` and ``__setstate__`` are generated and
+ attached to the class. This is necessary for slotted classes to be
+ pickleable. If left None, it's True by default for slotted classes
+ and False for dict classes.
+
+ If *auto_detect* is True, and *getstate_setstate* is left None, and
+ **either** ``__getstate__`` or ``__setstate__`` is detected
+ directly on the class (meaning: not inherited), it is set to False
+ (this is usually what you want).
+
+ auto_attribs (bool | None):
+ If True, look at type annotations to determine which attributes to
+ use, like `dataclasses`. If False, it will only look for explicit
+ :func:`field` class attributes, like classic *attrs*.
+
+ If left None, it will guess:
+
+ 1. If any attributes are annotated and no unannotated
+ `attrs.field`\ s are found, it assumes *auto_attribs=True*.
+ 2. Otherwise it assumes *auto_attribs=False* and tries to collect
+ `attrs.field`\ s.
+
+ If *attrs* decides to look at type annotations, **all** fields
+ **must** be annotated. If *attrs* encounters a field that is set to
+ a :func:`field` / `attr.ib` but lacks a type annotation, an
+ `attrs.exceptions.UnannotatedAttributeError` is raised. Use
+ ``field_name: typing.Any = field(...)`` if you don't want to set a
+ type.
+
+ .. warning::
+
+ For features that use the attribute name to create decorators
+            (for example, :ref:`validators`), you still *must*
+ assign :func:`field` / `attr.ib` to them. Otherwise Python will
+ either not find the name or try to use the default value to
+ call, for example, ``validator`` on it.
+
+ Attributes annotated as `typing.ClassVar`, and attributes that are
+            neither annotated nor set to a `field()` are **ignored**.
+
+ these (dict[str, object]):
+ A dictionary of name to the (private) return value of `field()`
+ mappings. This is useful to avoid the definition of your attributes
+ within the class body because you can't (for example, if you want
+ to add ``__repr__`` methods to Django models) or don't want to.
+
+ If *these* is not `None`, *attrs* will *not* search the class body
+ for attributes and will *not* remove any attributes from it.
+
+ The order is deduced from the order of the attributes inside
+ *these*.
+
+ Arguably, this is a rather obscure feature.
+
+ .. versionadded:: 20.1.0
+ .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
+ .. versionadded:: 22.2.0
+ *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
+ .. versionchanged:: 24.1.0
+ Instances are not compared as tuples of attributes anymore, but using a
+ big ``and`` condition. This is faster and has more correct behavior for
+ uncomparable values like `math.nan`.
+ .. versionadded:: 24.1.0
+ If a class has an *inherited* classmethod called
+ ``__attrs_init_subclass__``, it is executed after the class is created.
+ .. deprecated:: 24.1.0 *hash* is deprecated in favor of *unsafe_hash*.
+
+ .. note::
+
+        The main differences from the classic `attr.s` are:
+
+ - Automatically detect whether or not *auto_attribs* should be `True`
+ (c.f. *auto_attribs* parameter).
+ - Converters and validators run when attributes are set by default --
+ if *frozen* is `False`.
+ - *slots=True*
+
+ Usually, this has only upsides and few visible effects in everyday
+ programming. But it *can* lead to some surprising behaviors, so
+ please make sure to read :term:`slotted classes`.
+
+ - *auto_exc=True*
+ - *auto_detect=True*
+ - *order=False*
+ - Some options that were only relevant on Python 2 or were kept around
+ for backwards-compatibility have been removed.
+
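+    A minimal sketch of everyday usage:
+
+    >>> import attrs
+    >>> @attrs.define
+    ... class Point:
+    ...     x: int
+    ...     y: int = 0
+    >>> Point(1)
+    Point(x=1, y=0)
+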
+ """
+
+ def do_it(cls, auto_attribs):
+ return attrs(
+ maybe_cls=cls,
+ these=these,
+ repr=repr,
+ hash=hash,
+ unsafe_hash=unsafe_hash,
+ init=init,
+ slots=slots,
+ frozen=frozen,
+ weakref_slot=weakref_slot,
+ str=str,
+ auto_attribs=auto_attribs,
+ kw_only=kw_only,
+ cache_hash=cache_hash,
+ auto_exc=auto_exc,
+ eq=eq,
+ order=order,
+ auto_detect=auto_detect,
+ collect_by_mro=True,
+ getstate_setstate=getstate_setstate,
+ on_setattr=on_setattr,
+ field_transformer=field_transformer,
+ match_args=match_args,
+ )
+
+ def wrap(cls):
+ """
+ Making this a wrapper ensures this code runs during class creation.
+
+ We also ensure that frozen-ness of classes is inherited.
+ """
+ nonlocal frozen, on_setattr
+
+ had_on_setattr = on_setattr not in (None, setters.NO_OP)
+
+ # By default, mutable classes convert & validate on setattr.
+ if frozen is False and on_setattr is None:
+ on_setattr = _DEFAULT_ON_SETATTR
+
+ # However, if we subclass a frozen class, we inherit the immutability
+ # and disable on_setattr.
+ for base_cls in cls.__bases__:
+ if base_cls.__setattr__ is _frozen_setattrs:
+ if had_on_setattr:
+ msg = "Frozen classes can't use on_setattr (frozen-ness was inherited)."
+ raise ValueError(msg)
+
+ on_setattr = setters.NO_OP
+ break
+
+ if auto_attribs is not None:
+ return do_it(cls, auto_attribs)
+
+ try:
+ return do_it(cls, True)
+ except UnannotatedAttributeError:
+ return do_it(cls, False)
+
+ # maybe_cls's type depends on the usage of the decorator. It's a class
+ # if it's used as `@attrs` but `None` if used as `@attrs()`.
+ if maybe_cls is None:
+ return wrap
+
+ return wrap(maybe_cls)
+
+
+mutable = define
+frozen = partial(define, frozen=True, on_setattr=None)
+
+
+def field(
+ *,
+ default=NOTHING,
+ validator=None,
+ repr=True,
+ hash=None,
+ init=True,
+ metadata=None,
+ type=None,
+ converter=None,
+ factory=None,
+ kw_only=False,
+ eq=None,
+ order=None,
+ on_setattr=None,
+ alias=None,
+):
+ """
+ Create a new :term:`field` / :term:`attribute` on a class.
+
+ .. warning::
+
+ Does **nothing** unless the class is also decorated with
+ `attrs.define` (or similar)!
+
+ Args:
+ default:
+ A value that is used if an *attrs*-generated ``__init__`` is used
+ and no value is passed while instantiating or the attribute is
+ excluded using ``init=False``.
+
+ If the value is an instance of `attrs.Factory`, its callable will
+ be used to construct a new value (useful for mutable data types
+ like lists or dicts).
+
+ If a default is not set (or set manually to `attrs.NOTHING`), a
+ value *must* be supplied when instantiating; otherwise a
+ `TypeError` will be raised.
+
+ .. seealso:: `defaults`
+
+ factory (~typing.Callable):
+ Syntactic sugar for ``default=attr.Factory(factory)``.
+
+ validator (~typing.Callable | list[~typing.Callable]):
+ Callable that is called by *attrs*-generated ``__init__`` methods
+ after the instance has been initialized. They receive the
+ initialized instance, the :func:`~attrs.Attribute`, and the passed
+ value.
+
+ The return value is *not* inspected so the validator has to throw
+ an exception itself.
+
+ If a `list` is passed, its items are treated as validators and must
+ all pass.
+
+ Validators can be globally disabled and re-enabled using
+ `attrs.validators.get_disabled` / `attrs.validators.set_disabled`.
+
+ The validator can also be set using decorator notation as shown
+ below.
+
+ .. seealso:: :ref:`validators`
+
+ repr (bool | ~typing.Callable):
+ Include this attribute in the generated ``__repr__`` method. If
+ True, include the attribute; if False, omit it. By default, the
+ built-in ``repr()`` function is used. To override how the attribute
+ value is formatted, pass a ``callable`` that takes a single value
+ and returns a string. Note that the resulting string is used as-is,
+ which means it will be used directly *instead* of calling
+ ``repr()`` (the default).
+
+ eq (bool | ~typing.Callable):
+ If True (default), include this attribute in the generated
+ ``__eq__`` and ``__ne__`` methods that check two instances for
+ equality. To override how the attribute value is compared, pass a
+ callable that takes a single value and returns the value to be
+ compared.
+
+ .. seealso:: `comparison`
+
+ order (bool | ~typing.Callable):
+            If True (default), include this attribute in the generated
+ ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. To
+ override how the attribute value is ordered, pass a callable that
+ takes a single value and returns the value to be ordered.
+
+ .. seealso:: `comparison`
+
+ cmp(bool | ~typing.Callable):
+ Setting *cmp* is equivalent to setting *eq* and *order* to the same
+ value. Must not be mixed with *eq* or *order*.
+
+ .. seealso:: `comparison`
+
+ hash (bool | None):
+ Include this attribute in the generated ``__hash__`` method. If
+ None (default), mirror *eq*'s value. This is the correct behavior
+            according to the Python spec. Setting this value to anything
+            other than None is *discouraged*.
+
+ .. seealso:: `hashing`
+
+ init (bool):
+ Include this attribute in the generated ``__init__`` method.
+
+ It is possible to set this to False and set a default value. In
+            that case this attribute is unconditionally initialized with the
+ specified default value or factory.
+
+ .. seealso:: `init`
+
+ converter (typing.Callable | Converter):
+ A callable that is called by *attrs*-generated ``__init__`` methods
+ to convert attribute's value to the desired format.
+
+ If a vanilla callable is passed, it is given the passed-in value as
+ the only positional argument. It is possible to receive additional
+ arguments by wrapping the callable in a `Converter`.
+
+ Either way, the returned value will be used as the new value of the
+ attribute. The value is converted before being passed to the
+ validator, if any.
+
+ .. seealso:: :ref:`converters`
+
+ metadata (dict | None):
+ An arbitrary mapping, to be used by third-party code.
+
+ .. seealso:: `extending-metadata`.
+
+ type (type):
+ The type of the attribute. Nowadays, the preferred method to
+ specify the type is using a variable annotation (see :pep:`526`).
+ This argument is provided for backwards-compatibility and for usage
+ with `make_class`. Regardless of the approach used, the type will
+ be stored on ``Attribute.type``.
+
+ Please note that *attrs* doesn't do anything with this metadata by
+ itself. You can use it as part of your own code or for `static type
+ checking `.
+
+ kw_only (bool):
+ Make this attribute keyword-only in the generated ``__init__`` (if
+ ``init`` is False, this parameter is ignored).
+
+ on_setattr (~typing.Callable | list[~typing.Callable] | None | ~typing.Literal[attrs.setters.NO_OP]):
+            Allows overwriting the *on_setattr* setting from `attr.s`. If left
+ None, the *on_setattr* value from `attr.s` is used. Set to
+ `attrs.setters.NO_OP` to run **no** `setattr` hooks for this
+ attribute -- regardless of the setting in `define()`.
+
+ alias (str | None):
+ Override this attribute's parameter name in the generated
+            ``__init__`` method. If left None, defaults to ``name`` stripped
+ of leading underscores. See `private-attributes`.
+
+ .. versionadded:: 20.1.0
+ .. versionchanged:: 21.1.0
+ *eq*, *order*, and *cmp* also accept a custom callable
+ .. versionadded:: 22.2.0 *alias*
+ .. versionadded:: 23.1.0
+ The *type* parameter has been re-added; mostly for `attrs.make_class`.
+ Please note that type checkers ignore this metadata.
+
+ .. seealso::
+
+ `attr.ib`
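+
+    A minimal sketch, including the decorator notation for validators
+    mentioned above:
+
+    >>> import attrs
+    >>> @attrs.define
+    ... class C:
+    ...     x: int = attrs.field(default=0)
+    ...     @x.validator
+    ...     def _non_negative(self, attribute, value):
+    ...         if value < 0:
+    ...             raise ValueError("x must be >= 0")
+    >>> C(3).x
+    3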
+ """
+ return attrib(
+ default=default,
+ validator=validator,
+ repr=repr,
+ hash=hash,
+ init=init,
+ metadata=metadata,
+ type=type,
+ converter=converter,
+ factory=factory,
+ kw_only=kw_only,
+ eq=eq,
+ order=order,
+ on_setattr=on_setattr,
+ alias=alias,
+ )
+
+
+def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
+ """
+ Same as `attr.asdict`, except that collections types are always retained
+ and dict is always used as *dict_factory*.
+
+ .. versionadded:: 21.3.0
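+
+    A short sketch -- note the tuple value is retained as a tuple:
+
+    >>> import attrs
+    >>> @attrs.define
+    ... class C:
+    ...     xs: tuple
+    >>> asdict(C((1, 2)))
+    {'xs': (1, 2)}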
+ """
+ return _asdict(
+ inst=inst,
+ recurse=recurse,
+ filter=filter,
+ value_serializer=value_serializer,
+ retain_collection_types=True,
+ )
+
+
+def astuple(inst, *, recurse=True, filter=None):
+ """
+ Same as `attr.astuple`, except that collections types are always retained
+ and `tuple` is always used as the *tuple_factory*.
+
+ .. versionadded:: 21.3.0
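+
+    For example (reusing the hypothetical ``C`` from `asdict` above):
+
+    >>> astuple(C((1, 2)))
+    ((1, 2),)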
+ """
+ return _astuple(
+ inst=inst, recurse=recurse, filter=filter, retain_collection_types=True
+ )
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_typing_compat.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/_typing_compat.pyi
new file mode 100644
index 0000000..ca7b71e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_typing_compat.pyi
@@ -0,0 +1,15 @@
+from typing import Any, ClassVar, Protocol
+
+# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`.
+MYPY = False
+
+if MYPY:
+ # A protocol to be able to statically accept an attrs class.
+ class AttrsInstance_(Protocol):
+ __attrs_attrs__: ClassVar[Any]
+
+else:
+ # For type checkers without plug-in support use an empty protocol that
+ # will (hopefully) be combined into a union.
+ class AttrsInstance_(Protocol):
+ pass
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_version_info.py b/lambdas/aws-dd-forwarder-3.127.0/attr/_version_info.py
new file mode 100644
index 0000000..51a1312
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_version_info.py
@@ -0,0 +1,86 @@
+# SPDX-License-Identifier: MIT
+
+
+from functools import total_ordering
+
+from ._funcs import astuple
+from ._make import attrib, attrs
+
+
+@total_ordering
+@attrs(eq=False, order=False, slots=True, frozen=True)
+class VersionInfo:
+ """
+    A version object that can be compared to tuples of length 1--4:
+
+ >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2)
+ True
+ >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1)
+ True
+ >>> vi = attr.VersionInfo(19, 2, 0, "final")
+ >>> vi < (19, 1, 1)
+ False
+ >>> vi < (19,)
+ False
+ >>> vi == (19, 2,)
+ True
+ >>> vi == (19, 2, 1)
+ False
+
+ .. versionadded:: 19.2
+ """
+
+ year = attrib(type=int)
+ minor = attrib(type=int)
+ micro = attrib(type=int)
+ releaselevel = attrib(type=str)
+
+ @classmethod
+ def _from_version_string(cls, s):
+ """
+        Parse *s* and return a `VersionInfo`.
+ """
+ v = s.split(".")
+ if len(v) == 3:
+ v.append("final")
+
+ return cls(
+ year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3]
+ )
+
+ def _ensure_tuple(self, other):
+ """
+ Ensure *other* is a tuple of a valid length.
+
+ Returns a possibly transformed *other* and ourselves as a tuple of
+ the same length as *other*.
+ """
+
+ if self.__class__ is other.__class__:
+ other = astuple(other)
+
+ if not isinstance(other, tuple):
+ raise NotImplementedError
+
+ if not (1 <= len(other) <= 4):
+ raise NotImplementedError
+
+ return astuple(self)[: len(other)], other
+
+ def __eq__(self, other):
+ try:
+ us, them = self._ensure_tuple(other)
+ except NotImplementedError:
+ return NotImplemented
+
+ return us == them
+
+ def __lt__(self, other):
+ try:
+ us, them = self._ensure_tuple(other)
+ except NotImplementedError:
+ return NotImplemented
+
+ # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't
+ # have to do anything special with releaselevel for now.
+ return us < them
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/_version_info.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/_version_info.pyi
new file mode 100644
index 0000000..45ced08
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/_version_info.pyi
@@ -0,0 +1,9 @@
+class VersionInfo:
+ @property
+ def year(self) -> int: ...
+ @property
+ def minor(self) -> int: ...
+ @property
+ def micro(self) -> int: ...
+ @property
+ def releaselevel(self) -> str: ...
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/converters.py b/lambdas/aws-dd-forwarder-3.127.0/attr/converters.py
new file mode 100644
index 0000000..9238311
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/converters.py
@@ -0,0 +1,151 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful converters.
+"""
+
+
+import typing
+
+from ._compat import _AnnotationExtractor
+from ._make import NOTHING, Factory, pipe
+
+
+__all__ = [
+ "default_if_none",
+ "optional",
+ "pipe",
+ "to_bool",
+]
+
+
+def optional(converter):
+ """
+ A converter that allows an attribute to be optional. An optional attribute
+ is one which can be set to `None`.
+
+ Type annotations will be inferred from the wrapped converter's, if it has
+ any.
+
+ Args:
+ converter (typing.Callable):
+ the converter that is used for non-`None` values.
+
+ .. versionadded:: 17.1.0
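+
+    For example:
+
+    >>> optional(int)(None) is None
+    True
+    >>> optional(int)("42")
+    42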
+ """
+
+ def optional_converter(val):
+ if val is None:
+ return None
+ return converter(val)
+
+ xtr = _AnnotationExtractor(converter)
+
+ t = xtr.get_first_param_type()
+ if t:
+ optional_converter.__annotations__["val"] = typing.Optional[t]
+
+ rt = xtr.get_return_type()
+ if rt:
+ optional_converter.__annotations__["return"] = typing.Optional[rt]
+
+ return optional_converter
+
+
+def default_if_none(default=NOTHING, factory=None):
+ """
+    A converter that replaces `None` values with *default* or the result of
+    *factory*.
+
+ Args:
+ default:
+ Value to be used if `None` is passed. Passing an instance of
+ `attrs.Factory` is supported, however the ``takes_self`` option is
+ *not*.
+
+ factory (typing.Callable):
+ A callable that takes no parameters whose result is used if `None`
+ is passed.
+
+ Raises:
+        TypeError: If **neither** *default* **nor** *factory* is passed.
+
+ TypeError: If **both** *default* and *factory* are passed.
+
+ ValueError:
+ If an instance of `attrs.Factory` is passed with
+ ``takes_self=True``.
+
+ .. versionadded:: 18.2.0
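+
+    For example:
+
+    >>> c = default_if_none(default=42)
+    >>> c(None)
+    42
+    >>> c("spam")
+    'spam'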
+ """
+ if default is NOTHING and factory is None:
+ msg = "Must pass either `default` or `factory`."
+ raise TypeError(msg)
+
+ if default is not NOTHING and factory is not None:
+ msg = "Must pass either `default` or `factory` but not both."
+ raise TypeError(msg)
+
+ if factory is not None:
+ default = Factory(factory)
+
+ if isinstance(default, Factory):
+ if default.takes_self:
+ msg = "`takes_self` is not supported by default_if_none."
+ raise ValueError(msg)
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default.factory()
+
+ else:
+
+ def default_if_none_converter(val):
+ if val is not None:
+ return val
+
+ return default
+
+ return default_if_none_converter
+
+
+def to_bool(val):
+ """
+ Convert "boolean" strings (for example, from environment variables) to real
+ booleans.
+
+ Values mapping to `True`:
+
+ - ``True``
+ - ``"true"`` / ``"t"``
+ - ``"yes"`` / ``"y"``
+ - ``"on"``
+ - ``"1"``
+ - ``1``
+
+ Values mapping to `False`:
+
+ - ``False``
+ - ``"false"`` / ``"f"``
+ - ``"no"`` / ``"n"``
+ - ``"off"``
+ - ``"0"``
+ - ``0``
+
+ Raises:
+ ValueError: For any other value.
+
+ .. versionadded:: 21.3.0
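+
+    For example:
+
+    >>> to_bool("YES")
+    True
+    >>> to_bool(0)
+    False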
+ """
+ if isinstance(val, str):
+ val = val.lower()
+
+ if val in (True, "true", "t", "yes", "y", "on", "1", 1):
+ return True
+ if val in (False, "false", "f", "no", "n", "off", "0", 0):
+ return False
+
+ msg = f"Cannot convert value to bool: {val!r}"
+ raise ValueError(msg)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/converters.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/converters.pyi
new file mode 100644
index 0000000..9ef478f
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/converters.pyi
@@ -0,0 +1,13 @@
+from typing import Callable, TypeVar, overload
+
+from attrs import _ConverterType
+
+_T = TypeVar("_T")
+
+def pipe(*validators: _ConverterType) -> _ConverterType: ...
+def optional(converter: _ConverterType) -> _ConverterType: ...
+@overload
+def default_if_none(default: _T) -> _ConverterType: ...
+@overload
+def default_if_none(*, factory: Callable[[], _T]) -> _ConverterType: ...
+def to_bool(val: str) -> bool: ...
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/exceptions.py b/lambdas/aws-dd-forwarder-3.127.0/attr/exceptions.py
new file mode 100644
index 0000000..3b7abb8
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/exceptions.py
@@ -0,0 +1,95 @@
+# SPDX-License-Identifier: MIT
+
+from __future__ import annotations
+
+from typing import ClassVar
+
+
+class FrozenError(AttributeError):
+ """
+    A frozen/immutable instance or attribute has been attempted to be
+    modified.
+
+ It mirrors the behavior of ``namedtuples`` by using the same error message
+ and subclassing `AttributeError`.
+
+ .. versionadded:: 20.1.0
+ """
+
+ msg = "can't set attribute"
+    args: ClassVar[tuple[str]] = (msg,)
+
+
+class FrozenInstanceError(FrozenError):
+ """
+ A frozen instance has been attempted to be modified.
+
+ .. versionadded:: 16.1.0
+ """
+
+
+class FrozenAttributeError(FrozenError):
+ """
+ A frozen attribute has been attempted to be modified.
+
+ .. versionadded:: 20.1.0
+ """
+
+
+class AttrsAttributeNotFoundError(ValueError):
+ """
+ An *attrs* function couldn't find an attribute that the user asked for.
+
+ .. versionadded:: 16.2.0
+ """
+
+
+class NotAnAttrsClassError(ValueError):
+ """
+ A non-*attrs* class has been passed into an *attrs* function.
+
+ .. versionadded:: 16.2.0
+ """
+
+
+class DefaultAlreadySetError(RuntimeError):
+ """
+    A default has been set when defining the field and an attempt was made
+    to reset it using the decorator.
+
+ .. versionadded:: 17.1.0
+ """
+
+
+class UnannotatedAttributeError(RuntimeError):
+ """
+ A class with ``auto_attribs=True`` has a field without a type annotation.
+
+ .. versionadded:: 17.3.0
+ """
+
+
+class PythonTooOldError(RuntimeError):
+ """
+ It was attempted to use an *attrs* feature that requires a newer Python
+ version.
+
+ .. versionadded:: 18.2.0
+ """
+
+
+class NotCallableError(TypeError):
+ """
+ A field requiring a callable has been set with a value that is not
+ callable.
+
+ .. versionadded:: 19.2.0
+ """
+
+ def __init__(self, msg, value):
+ super(TypeError, self).__init__(msg, value)
+ self.msg = msg
+ self.value = value
+
+ def __str__(self):
+ return str(self.msg)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/exceptions.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/exceptions.pyi
new file mode 100644
index 0000000..f268011
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/exceptions.pyi
@@ -0,0 +1,17 @@
+from typing import Any
+
+class FrozenError(AttributeError):
+ msg: str = ...
+
+class FrozenInstanceError(FrozenError): ...
+class FrozenAttributeError(FrozenError): ...
+class AttrsAttributeNotFoundError(ValueError): ...
+class NotAnAttrsClassError(ValueError): ...
+class DefaultAlreadySetError(RuntimeError): ...
+class UnannotatedAttributeError(RuntimeError): ...
+class PythonTooOldError(RuntimeError): ...
+
+class NotCallableError(TypeError):
+ msg: str = ...
+ value: Any = ...
+ def __init__(self, msg: str, value: Any) -> None: ...
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/filters.py b/lambdas/aws-dd-forwarder-3.127.0/attr/filters.py
new file mode 100644
index 0000000..689b170
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/filters.py
@@ -0,0 +1,72 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful filters for `attrs.asdict` and `attrs.astuple`.
+"""
+
+from ._make import Attribute
+
+
+def _split_what(what):
+ """
+    Returns a tuple of `frozenset`s of classes, names, and attributes.
+ """
+ return (
+ frozenset(cls for cls in what if isinstance(cls, type)),
+ frozenset(cls for cls in what if isinstance(cls, str)),
+ frozenset(cls for cls in what if isinstance(cls, Attribute)),
+ )
+
+
+def include(*what):
+ """
+ Create a filter that only allows *what*.
+
+ Args:
+ what (list[type, str, attrs.Attribute]):
+ What to include. Can be a type, a name, or an attribute.
+
+ Returns:
+ Callable:
+ A callable that can be passed to `attrs.asdict`'s and
+ `attrs.astuple`'s *filter* argument.
+
+ .. versionchanged:: 23.1.0 Accept strings with field names.
+ """
+ cls, names, attrs = _split_what(what)
+
+ def include_(attribute, value):
+ return (
+ value.__class__ in cls
+ or attribute.name in names
+ or attribute in attrs
+ )
+
+ return include_
+
+
+def exclude(*what):
+ """
+ Create a filter that does **not** allow *what*.
+
+ Args:
+ what (list[type, str, attrs.Attribute]):
+ What to exclude. Can be a type, a name, or an attribute.
+
+ Returns:
+ Callable:
+ A callable that can be passed to `attrs.asdict`'s and
+ `attrs.astuple`'s *filter* argument.
+
+ .. versionchanged:: 23.3.0 Accept field name string as input argument
+ """
+ cls, names, attrs = _split_what(what)
+
+ def exclude_(attribute, value):
+ return not (
+ value.__class__ in cls
+ or attribute.name in names
+ or attribute in attrs
+ )
+
+ return exclude_
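+
+
+# Usage sketch (illustrative; ``Event`` and its fields are hypothetical):
+# passing a field name keeps it out of the serialized dict.
+#
+#   >>> import attr
+#   >>> @attr.s
+#   ... class Event:
+#   ...     user = attr.ib(default="alice")
+#   ...     password = attr.ib(default="hunter2")
+#   >>> attr.asdict(Event(), filter=exclude("password"))
+#   {'user': 'alice'}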
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/filters.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/filters.pyi
new file mode 100644
index 0000000..974abdc
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/filters.pyi
@@ -0,0 +1,6 @@
+from typing import Any
+
+from . import Attribute, _FilterType
+
+def include(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
+def exclude(*what: type | str | Attribute[Any]) -> _FilterType[Any]: ...
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/py.typed b/lambdas/aws-dd-forwarder-3.127.0/attr/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/setters.py b/lambdas/aws-dd-forwarder-3.127.0/attr/setters.py
new file mode 100644
index 0000000..a9ce016
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/setters.py
@@ -0,0 +1,79 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly used hooks for on_setattr.
+"""
+
+from . import _config
+from .exceptions import FrozenAttributeError
+
+
+def pipe(*setters):
+ """
+ Run all *setters* and return the return value of the last one.
+
+ .. versionadded:: 20.1.0
+ """
+
+ def wrapped_pipe(instance, attrib, new_value):
+ rv = new_value
+
+ for setter in setters:
+ rv = setter(instance, attrib, rv)
+
+ return rv
+
+ return wrapped_pipe
+
+
+def frozen(_, __, ___):
+ """
+    Prevent an attribute from being modified.
+
+ .. versionadded:: 20.1.0
+ """
+ raise FrozenAttributeError()
+
+
+def validate(instance, attrib, new_value):
+ """
+ Run *attrib*'s validator on *new_value* if it has one.
+
+ .. versionadded:: 20.1.0
+ """
+ if _config._run_validators is False:
+ return new_value
+
+ v = attrib.validator
+ if not v:
+ return new_value
+
+ v(instance, attrib, new_value)
+
+ return new_value
+
+
+def convert(instance, attrib, new_value):
+ """
+ Run *attrib*'s converter -- if it has one -- on *new_value* and return the
+ result.
+
+ .. versionadded:: 20.1.0
+ """
+ c = attrib.converter
+ if c:
+ # This can be removed once we drop 3.8 and use attrs.Converter instead.
+ from ._make import Converter
+
+ if not isinstance(c, Converter):
+ return c(new_value)
+
+ return c(new_value, instance, attrib)
+
+ return new_value
+
+
+# Sentinel for disabling class-wide *on_setattr* hooks for certain attributes.
+# Sphinx's autodata stopped working, so the docstring is inlined in the API
+# docs.
+NO_OP = object()
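+
+
+# Usage sketch: `attrs.define` wires `pipe(convert, validate)` as its default
+# *on_setattr* hook, so plain assignment re-runs the field's converter; the
+# ``C`` class below is hypothetical.
+#
+#   >>> from attrs import define, field
+#   >>> @define
+#   ... class C:
+#   ...     x: int = field(converter=int)
+#   >>> c = C("1")
+#   >>> c.x = "2"
+#   >>> c.x
+#   2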
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/setters.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/setters.pyi
new file mode 100644
index 0000000..73abf36
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/setters.pyi
@@ -0,0 +1,20 @@
+from typing import Any, NewType, NoReturn, TypeVar
+
+from . import Attribute
+from attrs import _OnSetAttrType
+
+_T = TypeVar("_T")
+
+def frozen(
+ instance: Any, attribute: Attribute[Any], new_value: Any
+) -> NoReturn: ...
+def pipe(*setters: _OnSetAttrType) -> _OnSetAttrType: ...
+def validate(instance: Any, attribute: Attribute[_T], new_value: _T) -> _T: ...
+
+# convert is allowed to return Any, because they can be chained using pipe.
+def convert(
+ instance: Any, attribute: Attribute[Any], new_value: Any
+) -> Any: ...
+
+_NoOpType = NewType("_NoOpType", object)
+NO_OP: _NoOpType
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/validators.py b/lambdas/aws-dd-forwarder-3.127.0/attr/validators.py
new file mode 100644
index 0000000..8a56717
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/validators.py
@@ -0,0 +1,711 @@
+# SPDX-License-Identifier: MIT
+
+"""
+Commonly useful validators.
+"""
+
+
+import operator
+import re
+
+from contextlib import contextmanager
+from re import Pattern
+
+from ._config import get_run_validators, set_run_validators
+from ._make import _AndValidator, and_, attrib, attrs
+from .converters import default_if_none
+from .exceptions import NotCallableError
+
+
+__all__ = [
+ "and_",
+ "deep_iterable",
+ "deep_mapping",
+ "disabled",
+ "ge",
+ "get_disabled",
+ "gt",
+ "in_",
+ "instance_of",
+ "is_callable",
+ "le",
+ "lt",
+ "matches_re",
+ "max_len",
+ "min_len",
+ "not_",
+ "optional",
+ "or_",
+ "set_disabled",
+]
+
+
+def set_disabled(disabled):
+ """
+ Globally disable or enable running validators.
+
+ By default, they are run.
+
+ Args:
+ disabled (bool): If `True`, disable running all validators.
+
+ .. warning::
+
+ This function is not thread-safe!
+
+ .. versionadded:: 21.3.0
+ """
+ set_run_validators(not disabled)
+
+
+def get_disabled():
+ """
+ Return a bool indicating whether validators are currently disabled or not.
+
+ Returns:
+        bool: `True` if validators are currently disabled.
+
+ .. versionadded:: 21.3.0
+ """
+ return not get_run_validators()
+
+
+@contextmanager
+def disabled():
+ """
+ Context manager that disables running validators within its context.
+
+ .. warning::
+
+ This context manager is not thread-safe!
+
+ .. versionadded:: 21.3.0
+ """
+ set_run_validators(False)
+ try:
+ yield
+ finally:
+ set_run_validators(True)
+
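+# Usage sketch (illustrative; the ``C`` class is hypothetical): validators
+# are skipped inside the block.
+#
+#   >>> import attr
+#   >>> @attr.s
+#   ... class C:
+#   ...     x = attr.ib(validator=instance_of(int))
+#   >>> with disabled():
+#   ...     C("not an int")
+#   C(x='not an int')
+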
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _InstanceOfValidator:
+ type = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not isinstance(value, self.type):
+ msg = f"'{attr.name}' must be {self.type!r} (got {value!r} that is a {value.__class__!r})."
+ raise TypeError(
+ msg,
+ attr,
+ self.type,
+ value,
+ )
+
+ def __repr__(self):
+ return f""
+
+
+def instance_of(type):
+ """
+ A validator that raises a `TypeError` if the initializer is called with a
+ wrong type for this particular attribute (checks are performed using
+ `isinstance` therefore it's also valid to pass a tuple of types).
+
+ Args:
+ type (type | tuple[type]): The type to check for.
+
+ Raises:
+ TypeError:
+ With a human readable error message, the attribute (of type
+ `attrs.Attribute`), the expected type, and the value it got.
+ """
+ return _InstanceOfValidator(type)
+
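+# Usage sketch (illustrative; ``Point`` is a hypothetical class). A tuple of
+# types is also accepted:
+#
+#   >>> import attr
+#   >>> @attr.s
+#   ... class Point:
+#   ...     x = attr.ib(validator=instance_of((int, float)))
+#   >>> Point(1.5)
+#   Point(x=1.5)
+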
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MatchesReValidator:
+ pattern = attrib()
+ match_func = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.match_func(value):
+ msg = f"'{attr.name}' must match regex {self.pattern.pattern!r} ({value!r} doesn't)"
+ raise ValueError(
+ msg,
+ attr,
+ self.pattern,
+ value,
+ )
+
+ def __repr__(self):
+ return f""
+
+
+def matches_re(regex, flags=0, func=None):
+ r"""
+ A validator that raises `ValueError` if the initializer is called with a
+ string that doesn't match *regex*.
+
+ Args:
+ regex (str, re.Pattern):
+ A regex string or precompiled pattern to match against
+
+ flags (int):
+ Flags that will be passed to the underlying re function (default 0)
+
+ func (typing.Callable):
+ Which underlying `re` function to call. Valid options are
+ `re.fullmatch`, `re.search`, and `re.match`; the default `None`
+ means `re.fullmatch`. For performance reasons, the pattern is
+ always precompiled using `re.compile`.
+
+ .. versionadded:: 19.2.0
+ .. versionchanged:: 21.3.0 *regex* can be a pre-compiled pattern.
+ """
+ valid_funcs = (re.fullmatch, None, re.search, re.match)
+ if func not in valid_funcs:
+ msg = "'func' must be one of {}.".format(
+ ", ".join(
+ sorted(e and e.__name__ or "None" for e in set(valid_funcs))
+ )
+ )
+ raise ValueError(msg)
+
+ if isinstance(regex, Pattern):
+ if flags:
+ msg = "'flags' can only be used with a string pattern; pass flags to re.compile() instead"
+ raise TypeError(msg)
+ pattern = regex
+ else:
+ pattern = re.compile(regex, flags)
+
+ if func is re.match:
+ match_func = pattern.match
+ elif func is re.search:
+ match_func = pattern.search
+ else:
+ match_func = pattern.fullmatch
+
+ return _MatchesReValidator(pattern, match_func)
+
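+# Usage sketch (illustrative; ``Entry`` and its field are hypothetical). With
+# the default `re.fullmatch`, the whole string must match:
+#
+#   >>> import attr
+#   >>> @attr.s
+#   ... class Entry:
+#   ...     date = attr.ib(validator=matches_re(r"\d{4}-\d{2}-\d{2}"))
+#   >>> Entry("2024-01-31")
+#   Entry(date='2024-01-31')
+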
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _OptionalValidator:
+ validator = attrib()
+
+ def __call__(self, inst, attr, value):
+ if value is None:
+ return
+
+ self.validator(inst, attr, value)
+
+ def __repr__(self):
+ return f""
+
+
+def optional(validator):
+ """
+ A validator that makes an attribute optional. An optional attribute is one
+ which can be set to `None` in addition to satisfying the requirements of
+ the sub-validator.
+
+ Args:
+ validator
+ (typing.Callable | tuple[typing.Callable] | list[typing.Callable]):
+ A validator (or validators) that is used for non-`None` values.
+
+ .. versionadded:: 15.1.0
+ .. versionchanged:: 17.1.0 *validator* can be a list of validators.
+ .. versionchanged:: 23.1.0 *validator* can also be a tuple of validators.
+ """
+ if isinstance(validator, (list, tuple)):
+ return _OptionalValidator(_AndValidator(validator))
+
+ return _OptionalValidator(validator)
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _InValidator:
+ options = attrib()
+ _original_options = attrib(hash=False)
+
+ def __call__(self, inst, attr, value):
+ try:
+ in_options = value in self.options
+ except TypeError: # e.g. `1 in "abc"`
+ in_options = False
+
+ if not in_options:
+ msg = f"'{attr.name}' must be in {self._original_options!r} (got {value!r})"
+ raise ValueError(
+ msg,
+ attr,
+ self._original_options,
+ value,
+ )
+
+ def __repr__(self):
+ return f""
+
+
+def in_(options):
+ """
+ A validator that raises a `ValueError` if the initializer is called with a
+ value that does not belong in the *options* provided.
+
+ The check is performed using ``value in options``, so *options* has to
+ support that operation.
+
+ To keep the validator hashable, dicts, lists, and sets are transparently
+ transformed into a `tuple`.
+
+ Args:
+ options: Allowed options.
+
+ Raises:
+ ValueError:
+ With a human readable error message, the attribute (of type
+ `attrs.Attribute`), the expected options, and the value it got.
+
+ .. versionadded:: 17.1.0
+ .. versionchanged:: 22.1.0
+ The ValueError was incomplete until now and only contained the human
+ readable error message. Now it contains all the information that has
+ been promised since 17.1.0.
+ .. versionchanged:: 24.1.0
+ *options* that are a list, dict, or a set are now transformed into a
+ tuple to keep the validator hashable.
+ """
+ repr_options = options
+ if isinstance(options, (list, dict, set)):
+ options = tuple(options)
+
+ return _InValidator(options, repr_options)
+
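+# Usage sketch (illustrative; ``Job`` and its states are hypothetical). A
+# value outside the options raises `ValueError`:
+#
+#   >>> import attr
+#   >>> @attr.s
+#   ... class Job:
+#   ...     state = attr.ib(validator=in_(["pending", "running", "done"]))
+#   >>> Job("running")
+#   Job(state='running')
+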
+
+@attrs(repr=False, slots=False, unsafe_hash=True)
+class _IsCallableValidator:
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not callable(value):
+ message = (
+ "'{name}' must be callable "
+ "(got {value!r} that is a {actual!r})."
+ )
+ raise NotCallableError(
+ msg=message.format(
+ name=attr.name, value=value, actual=value.__class__
+ ),
+ value=value,
+ )
+
+ def __repr__(self):
+ return ""
+
+
+def is_callable():
+ """
+    A validator that raises an `attrs.exceptions.NotCallableError` if the
+ initializer is called with a value for this particular attribute that is
+ not callable.
+
+ .. versionadded:: 19.1.0
+
+ Raises:
+ attrs.exceptions.NotCallableError:
+ With a human readable error message containing the attribute
+ (`attrs.Attribute`) name, and the value it got.
+ """
+ return _IsCallableValidator()
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _DeepIterable:
+ member_validator = attrib(validator=is_callable())
+ iterable_validator = attrib(
+ default=None, validator=optional(is_callable())
+ )
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.iterable_validator is not None:
+ self.iterable_validator(inst, attr, value)
+
+ for member in value:
+ self.member_validator(inst, attr, member)
+
+ def __repr__(self):
+ iterable_identifier = (
+ ""
+ if self.iterable_validator is None
+ else f" {self.iterable_validator!r}"
+ )
+        return (
+            f"<deep_iterable validator for{iterable_identifier}"
+            f" iterables of {self.member_validator!r}>"
+        )
+
+
+def deep_iterable(member_validator, iterable_validator=None):
+ """
+ A validator that performs deep validation of an iterable.
+
+ Args:
+ member_validator: Validator to apply to iterable members.
+
+ iterable_validator:
+ Validator to apply to iterable itself (optional).
+
+    Raises:
+        TypeError: If any sub-validators fail.
+
+ .. versionadded:: 19.1.0
+ """
+ if isinstance(member_validator, (list, tuple)):
+ member_validator = and_(*member_validator)
+ return _DeepIterable(member_validator, iterable_validator)
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _DeepMapping:
+ key_validator = attrib(validator=is_callable())
+ value_validator = attrib(validator=is_callable())
+ mapping_validator = attrib(default=None, validator=optional(is_callable()))
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if self.mapping_validator is not None:
+ self.mapping_validator(inst, attr, value)
+
+ for key in value:
+ self.key_validator(inst, attr, key)
+ self.value_validator(inst, attr, value[key])
+
+ def __repr__(self):
+ return f""
+
+
+def deep_mapping(key_validator, value_validator, mapping_validator=None):
+ """
+ A validator that performs deep validation of a dictionary.
+
+ Args:
+ key_validator: Validator to apply to dictionary keys.
+
+ value_validator: Validator to apply to dictionary values.
+
+ mapping_validator:
+ Validator to apply to top-level mapping attribute (optional).
+
+ .. versionadded:: 19.1.0
+
+ Raises:
+        TypeError: If any sub-validators fail.
+ """
+ return _DeepMapping(key_validator, value_validator, mapping_validator)
+
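+# Usage sketch combining both deep validators (illustrative; ``Config`` and
+# its fields are hypothetical):
+#
+#   >>> import attr
+#   >>> @attr.s
+#   ... class Config:
+#   ...     tags = attr.ib(validator=deep_iterable(
+#   ...         member_validator=instance_of(str),
+#   ...         iterable_validator=instance_of(list)))
+#   ...     env = attr.ib(validator=deep_mapping(
+#   ...         key_validator=instance_of(str),
+#   ...         value_validator=instance_of(str)))
+#   >>> Config(["a", "b"], {"PATH": "/usr/bin"})
+#   Config(tags=['a', 'b'], env={'PATH': '/usr/bin'})
+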
+
+@attrs(repr=False, frozen=True, slots=True)
+class _NumberValidator:
+ bound = attrib()
+ compare_op = attrib()
+ compare_func = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not self.compare_func(value, self.bound):
+ msg = f"'{attr.name}' must be {self.compare_op} {self.bound}: {value}"
+ raise ValueError(msg)
+
+ def __repr__(self):
+ return f""
+
+
+def lt(val):
+ """
+ A validator that raises `ValueError` if the initializer is called with a
+ number larger or equal to *val*.
+
+ The validator uses `operator.lt` to compare the values.
+
+ Args:
+ val: Exclusive upper bound for values.
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, "<", operator.lt)
+
+
+def le(val):
+ """
+ A validator that raises `ValueError` if the initializer is called with a
+ number greater than *val*.
+
+ The validator uses `operator.le` to compare the values.
+
+ Args:
+ val: Inclusive upper bound for values.
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, "<=", operator.le)
+
+
+def ge(val):
+ """
+ A validator that raises `ValueError` if the initializer is called with a
+ number smaller than *val*.
+
+ The validator uses `operator.ge` to compare the values.
+
+ Args:
+ val: Inclusive lower bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, ">=", operator.ge)
+
+
+def gt(val):
+ """
+ A validator that raises `ValueError` if the initializer is called with a
+ number smaller or equal to *val*.
+
+    The validator uses `operator.gt` to compare the values.
+
+ Args:
+ val: Exclusive lower bound for values
+
+ .. versionadded:: 21.3.0
+ """
+ return _NumberValidator(val, ">", operator.gt)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MaxLengthValidator:
+ max_length = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if len(value) > self.max_length:
+ msg = f"Length of '{attr.name}' must be <= {self.max_length}: {len(value)}"
+ raise ValueError(msg)
+
+ def __repr__(self):
+ return f""
+
+
+def max_len(length):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a string or iterable that is longer than *length*.
+
+ Args:
+ length (int): Maximum length of the string or iterable
+
+ .. versionadded:: 21.3.0
+ """
+ return _MaxLengthValidator(length)
+
+
+@attrs(repr=False, frozen=True, slots=True)
+class _MinLengthValidator:
+ min_length = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if len(value) < self.min_length:
+ msg = f"Length of '{attr.name}' must be >= {self.min_length}: {len(value)}"
+ raise ValueError(msg)
+
+ def __repr__(self):
+ return f""
+
+
+def min_len(length):
+ """
+ A validator that raises `ValueError` if the initializer is called
+ with a string or iterable that is shorter than *length*.
+
+ Args:
+ length (int): Minimum length of the string or iterable
+
+ .. versionadded:: 22.1.0
+ """
+ return _MinLengthValidator(length)
+
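+# Usage sketch combining the bound and length validators (illustrative;
+# ``Password`` is a hypothetical class):
+#
+#   >>> import attr
+#   >>> @attr.s
+#   ... class Password:
+#   ...     value = attr.ib(validator=[min_len(8), max_len(64)])
+#   ...     cost = attr.ib(validator=[ge(4), lt(32)])
+#   >>> Password("correct horse", cost=12)
+#   Password(value='correct horse', cost=12)
+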
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _SubclassOfValidator:
+ type = attrib()
+
+ def __call__(self, inst, attr, value):
+ """
+ We use a callable class to be able to change the ``__repr__``.
+ """
+ if not issubclass(value, self.type):
+ msg = f"'{attr.name}' must be a subclass of {self.type!r} (got {value!r})."
+ raise TypeError(
+ msg,
+ attr,
+ self.type,
+ value,
+ )
+
+ def __repr__(self):
+ return f""
+
+
+def _subclass_of(type):
+ """
+ A validator that raises a `TypeError` if the initializer is called with a
+ wrong type for this particular attribute (checks are performed using
+ `issubclass` therefore it's also valid to pass a tuple of types).
+
+ Args:
+ type (type | tuple[type, ...]): The type(s) to check for.
+
+ Raises:
+ TypeError:
+ With a human readable error message, the attribute (of type
+ `attrs.Attribute`), the expected type, and the value it got.
+ """
+ return _SubclassOfValidator(type)
+
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _NotValidator:
+ validator = attrib()
+ msg = attrib(
+ converter=default_if_none(
+ "not_ validator child '{validator!r}' "
+ "did not raise a captured error"
+ )
+ )
+ exc_types = attrib(
+ validator=deep_iterable(
+ member_validator=_subclass_of(Exception),
+ iterable_validator=instance_of(tuple),
+ ),
+ )
+
+ def __call__(self, inst, attr, value):
+ try:
+ self.validator(inst, attr, value)
+ except self.exc_types:
+ pass # suppress error to invert validity
+ else:
+ raise ValueError(
+ self.msg.format(
+ validator=self.validator,
+ exc_types=self.exc_types,
+ ),
+ attr,
+ self.validator,
+ value,
+ self.exc_types,
+ )
+
+ def __repr__(self):
+ return f""
+
+
+def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)):
+ """
+ A validator that wraps and logically 'inverts' the validator passed to it.
+ It will raise a `ValueError` if the provided validator *doesn't* raise a
+ `ValueError` or `TypeError` (by default), and will suppress the exception
+ if the provided validator *does*.
+
+ Intended to be used with existing validators to compose logic without
+ needing to create inverted variants, for example, ``not_(in_(...))``.
+
+ Args:
+ validator: A validator to be logically inverted.
+
+ msg (str):
+ Message to raise if validator fails. Formatted with keys
+ ``exc_types`` and ``validator``.
+
+ exc_types (tuple[type, ...]):
+ Exception type(s) to capture. Other types raised by child
+ validators will not be intercepted and pass through.
+
+ Raises:
+ ValueError:
+ With a human readable error message, the attribute (of type
+ `attrs.Attribute`), the validator that failed to raise an
+ exception, the value it got, and the expected exception types.
+
+ .. versionadded:: 22.2.0
+ """
+ try:
+ exc_types = tuple(exc_types)
+ except TypeError:
+ exc_types = (exc_types,)
+ return _NotValidator(validator, msg, exc_types)
+
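+# Usage sketch (illustrative; ``User`` and the reserved names are
+# hypothetical): accept anything *except* the listed values.
+#
+#   >>> import attr
+#   >>> @attr.s
+#   ... class User:
+#   ...     name = attr.ib(validator=not_(in_(("root", "admin"))))
+#   >>> User("alice")
+#   User(name='alice')
+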
+
+@attrs(repr=False, slots=True, unsafe_hash=True)
+class _OrValidator:
+ validators = attrib()
+
+ def __call__(self, inst, attr, value):
+ for v in self.validators:
+ try:
+ v(inst, attr, value)
+ except Exception: # noqa: BLE001, PERF203, S112
+ continue
+ else:
+ return
+
+ msg = f"None of {self.validators!r} satisfied for value {value!r}"
+ raise ValueError(msg)
+
+ def __repr__(self):
+ return f""
+
+
+def or_(*validators):
+ """
+ A validator that composes multiple validators into one.
+
+ When called on a value, it runs all wrapped validators until one of them is
+ satisfied.
+
+ Args:
+ validators (~collections.abc.Iterable[typing.Callable]):
+ Arbitrary number of validators.
+
+ Raises:
+ ValueError:
+ If no validator is satisfied. Raised with a human-readable error
+ message listing all the wrapped validators and the value that
+ failed all of them.
+
+ .. versionadded:: 24.1.0
+ """
+ vals = []
+ for v in validators:
+ vals.extend(v.validators if isinstance(v, _OrValidator) else [v])
+
+ return _OrValidator(tuple(vals))
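+
+
+# Usage sketch (illustrative; ``Service`` is a hypothetical class accepting a
+# port as either an int or a digit string):
+#
+#   >>> import attr
+#   >>> @attr.s
+#   ... class Service:
+#   ...     port = attr.ib(validator=or_(instance_of(int), matches_re(r"\d+")))
+#   >>> Service(8080)
+#   Service(port=8080)
+#   >>> Service("8080")
+#   Service(port='8080')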
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attr/validators.pyi b/lambdas/aws-dd-forwarder-3.127.0/attr/validators.pyi
new file mode 100644
index 0000000..a314110
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attr/validators.pyi
@@ -0,0 +1,83 @@
+from typing import (
+ Any,
+ AnyStr,
+ Callable,
+ Container,
+ ContextManager,
+ Iterable,
+ Mapping,
+ Match,
+ Pattern,
+ TypeVar,
+ overload,
+)
+
+from attrs import _ValidatorType
+from attrs import _ValidatorArgType
+
+_T = TypeVar("_T")
+_T1 = TypeVar("_T1")
+_T2 = TypeVar("_T2")
+_T3 = TypeVar("_T3")
+_I = TypeVar("_I", bound=Iterable)
+_K = TypeVar("_K")
+_V = TypeVar("_V")
+_M = TypeVar("_M", bound=Mapping)
+
+def set_disabled(run: bool) -> None: ...
+def get_disabled() -> bool: ...
+def disabled() -> ContextManager[None]: ...
+
+# To be more precise on instance_of use some overloads.
+# If there are more than 3 items in the tuple then we fall back to Any
+@overload
+def instance_of(type: type[_T]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(type: tuple[type[_T]]) -> _ValidatorType[_T]: ...
+@overload
+def instance_of(
+ type: tuple[type[_T1], type[_T2]]
+) -> _ValidatorType[_T1 | _T2]: ...
+@overload
+def instance_of(
+ type: tuple[type[_T1], type[_T2], type[_T3]]
+) -> _ValidatorType[_T1 | _T2 | _T3]: ...
+@overload
+def instance_of(type: tuple[type, ...]) -> _ValidatorType[Any]: ...
+def optional(
+ validator: (
+ _ValidatorType[_T]
+ | list[_ValidatorType[_T]]
+ | tuple[_ValidatorType[_T]]
+ ),
+) -> _ValidatorType[_T | None]: ...
+def in_(options: Container[_T]) -> _ValidatorType[_T]: ...
+def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
+def matches_re(
+ regex: Pattern[AnyStr] | AnyStr,
+ flags: int = ...,
+ func: Callable[[AnyStr, AnyStr, int], Match[AnyStr] | None] | None = ...,
+) -> _ValidatorType[AnyStr]: ...
+def deep_iterable(
+ member_validator: _ValidatorArgType[_T],
+ iterable_validator: _ValidatorType[_I] | None = ...,
+) -> _ValidatorType[_I]: ...
+def deep_mapping(
+ key_validator: _ValidatorType[_K],
+ value_validator: _ValidatorType[_V],
+ mapping_validator: _ValidatorType[_M] | None = ...,
+) -> _ValidatorType[_M]: ...
+def is_callable() -> _ValidatorType[_T]: ...
+def lt(val: _T) -> _ValidatorType[_T]: ...
+def le(val: _T) -> _ValidatorType[_T]: ...
+def ge(val: _T) -> _ValidatorType[_T]: ...
+def gt(val: _T) -> _ValidatorType[_T]: ...
+def max_len(length: int) -> _ValidatorType[_T]: ...
+def min_len(length: int) -> _ValidatorType[_T]: ...
+def not_(
+ validator: _ValidatorType[_T],
+ *,
+ msg: str | None = None,
+ exc_types: type[Exception] | Iterable[type[Exception]] = ...,
+) -> _ValidatorType[_T]: ...
+def or_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ...
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/METADATA
new file mode 100644
index 0000000..a85b378
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/METADATA
@@ -0,0 +1,242 @@
+Metadata-Version: 2.3
+Name: attrs
+Version: 24.2.0
+Summary: Classes Without Boilerplate
+Project-URL: Documentation, https://www.attrs.org/
+Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html
+Project-URL: GitHub, https://github.com/python-attrs/attrs
+Project-URL: Funding, https://github.com/sponsors/hynek
+Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi
+Author-email: Hynek Schlawack
+License-Expression: MIT
+License-File: LICENSE
+Keywords: attribute,boilerplate,class
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Typing :: Typed
+Requires-Python: >=3.7
+Requires-Dist: importlib-metadata; python_version < '3.8'
+Provides-Extra: benchmark
+Requires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'benchmark'
+Requires-Dist: hypothesis; extra == 'benchmark'
+Requires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.9') and extra == 'benchmark'
+Requires-Dist: pympler; extra == 'benchmark'
+Requires-Dist: pytest-codspeed; extra == 'benchmark'
+Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.9' and python_version < '3.13') and extra == 'benchmark'
+Requires-Dist: pytest-xdist[psutil]; extra == 'benchmark'
+Requires-Dist: pytest>=4.3.0; extra == 'benchmark'
+Provides-Extra: cov
+Requires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'cov'
+Requires-Dist: coverage[toml]>=5.3; extra == 'cov'
+Requires-Dist: hypothesis; extra == 'cov'
+Requires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.9') and extra == 'cov'
+Requires-Dist: pympler; extra == 'cov'
+Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.9' and python_version < '3.13') and extra == 'cov'
+Requires-Dist: pytest-xdist[psutil]; extra == 'cov'
+Requires-Dist: pytest>=4.3.0; extra == 'cov'
+Provides-Extra: dev
+Requires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'dev'
+Requires-Dist: hypothesis; extra == 'dev'
+Requires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.9') and extra == 'dev'
+Requires-Dist: pre-commit; extra == 'dev'
+Requires-Dist: pympler; extra == 'dev'
+Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.9' and python_version < '3.13') and extra == 'dev'
+Requires-Dist: pytest-xdist[psutil]; extra == 'dev'
+Requires-Dist: pytest>=4.3.0; extra == 'dev'
+Provides-Extra: docs
+Requires-Dist: cogapp; extra == 'docs'
+Requires-Dist: furo; extra == 'docs'
+Requires-Dist: myst-parser; extra == 'docs'
+Requires-Dist: sphinx; extra == 'docs'
+Requires-Dist: sphinx-notfound-page; extra == 'docs'
+Requires-Dist: sphinxcontrib-towncrier; extra == 'docs'
+Requires-Dist: towncrier<24.7; extra == 'docs'
+Provides-Extra: tests
+Requires-Dist: cloudpickle; (platform_python_implementation == 'CPython') and extra == 'tests'
+Requires-Dist: hypothesis; extra == 'tests'
+Requires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.9') and extra == 'tests'
+Requires-Dist: pympler; extra == 'tests'
+Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.9' and python_version < '3.13') and extra == 'tests'
+Requires-Dist: pytest-xdist[psutil]; extra == 'tests'
+Requires-Dist: pytest>=4.3.0; extra == 'tests'
+Provides-Extra: tests-mypy
+Requires-Dist: mypy>=1.11.1; (platform_python_implementation == 'CPython' and python_version >= '3.9') and extra == 'tests-mypy'
+Requires-Dist: pytest-mypy-plugins; (platform_python_implementation == 'CPython' and python_version >= '3.9' and python_version < '3.13') and extra == 'tests-mypy'
+Description-Content-Type: text/markdown
+
+
+*attrs* is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka [dunder methods](https://www.attrs.org/en/latest/glossary.html#term-dunder-methods)).
+[Trusted by NASA](https://docs.github.com/en/account-and-profile/setting-up-and-managing-your-github-profile/customizing-your-profile/personalizing-your-profile#list-of-qualifying-repositories-for-mars-2020-helicopter-contributor-achievement) for Mars missions since 2020!
+
+Its main goal is to help you to write **concise** and **correct** software without slowing down your code.
+
+
+## Sponsors
+
+*attrs* would not be possible without our [amazing sponsors](https://github.com/sponsors/hynek).
+Especially those generously supporting us at *The Organization* tier and higher:
+
+
+Please consider joining them to help make attrs’s maintenance more sustainable!
+
+
+
+
+## Example
+
+*attrs* gives you a class decorator and a way to declaratively define the attributes on that class:
+
+
+
+```pycon
+>>> from attrs import asdict, define, make_class, Factory
+
+>>> @define
+... class SomeClass:
+... a_number: int = 42
+... list_of_numbers: list[int] = Factory(list)
+...
+... def hard_math(self, another_number):
+... return self.a_number + sum(self.list_of_numbers) * another_number
+
+
+>>> sc = SomeClass(1, [1, 2, 3])
+>>> sc
+SomeClass(a_number=1, list_of_numbers=[1, 2, 3])
+
+>>> sc.hard_math(3)
+19
+>>> sc == SomeClass(1, [1, 2, 3])
+True
+>>> sc != SomeClass(2, [3, 2, 1])
+True
+
+>>> asdict(sc)
+{'a_number': 1, 'list_of_numbers': [1, 2, 3]}
+
+>>> SomeClass()
+SomeClass(a_number=42, list_of_numbers=[])
+
+>>> C = make_class("C", ["a", "b"])
+>>> C("foo", "bar")
+C(a='foo', b='bar')
+```
+
+After *declaring* your attributes, *attrs* gives you:
+
+- a concise and explicit overview of the class's attributes,
+- a nice human-readable `__repr__`,
+- equality-checking methods,
+- an initializer,
+- and much more,
+
+*without* writing dull boilerplate code again and again and *without* runtime performance penalties.
+
+---
+
+This example uses *attrs*'s modern APIs that have been introduced in version 20.1.0, and the *attrs* package import name that has been added in version 21.3.0.
+The classic APIs (`@attr.s`, `attr.ib`, plus their serious-business aliases) and the `attr` package import name will remain **indefinitely**.
+
+Check out [*On The Core API Names*](https://www.attrs.org/en/latest/names.html) for an in-depth explanation!
+
+
+### Hate Type Annotations!?
+
+No problem!
+Types are entirely **optional** with *attrs*.
+Simply assign `attrs.field()` to the attributes instead of annotating them with types:
+
+```python
+from attrs import define, field
+
+@define
+class SomeClass:
+ a_number = field(default=42)
+ list_of_numbers = field(factory=list)
+```
+
+
+## Data Classes
+
+On the tin, *attrs* might remind you of `dataclasses` (and indeed, `dataclasses` [are a descendant](https://hynek.me/articles/import-attrs/) of *attrs*).
+In practice it does a lot more and is more flexible.
+For instance, it allows you to define [special handling of NumPy arrays for equality checks](https://www.attrs.org/en/stable/comparison.html#customization), allows more ways to [plug into the initialization process](https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization), has a replacement for `__init_subclass__`, and allows for stepping through the generated methods using a debugger.
+
+For more details, please refer to our [comparison page](https://www.attrs.org/en/stable/why.html#data-classes), but generally speaking, we are more likely to commit crimes against nature to make things work that one would expect to work, but that are quite complicated in practice.
+
+
+## Project Information
+
+- [**Changelog**](https://www.attrs.org/en/stable/changelog.html)
+- [**Documentation**](https://www.attrs.org/)
+- [**PyPI**](https://pypi.org/project/attrs/)
+- [**Source Code**](https://github.com/python-attrs/attrs)
+- [**Contributing**](https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md)
+- [**Third-party Extensions**](https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs)
+- **Get Help**: use the `python-attrs` tag on [Stack Overflow](https://stackoverflow.com/questions/tagged/python-attrs)
+
+
+### *attrs* for Enterprise
+
+Available as part of the Tidelift Subscription.
+
+The maintainers of *attrs* and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications.
+Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use.
+[Learn more](https://tidelift.com/?utm_source=lifter&utm_medium=referral&utm_campaign=hynek).
+
+## Release Information
+
+### Deprecations
+
+- Given the amount of warnings raised in the broader ecosystem, we've decided to only soft-deprecate the *hash* argument to `@define` / `@attr.s`.
+ Please don't use it in new code, but we don't intend to remove it anymore.
+ [#1330](https://github.com/python-attrs/attrs/issues/1330)
+
+
+### Changes
+
+- `attrs.converters.pipe()` (and its syntactic sugar of passing a list for `attrs.field()`'s / `attr.ib()`'s *converter* argument) works again when passing `attrs.setters.convert` to *on_setattr* (which is default for `attrs.define`).
+ [#1328](https://github.com/python-attrs/attrs/issues/1328)
+- Restored support for PEP [649](https://peps.python.org/pep-0649/) / [749](https://peps.python.org/pep-0749/)-implementing Pythons -- currently 3.14-dev.
+ [#1329](https://github.com/python-attrs/attrs/issues/1329)
+
+
+
+---
+
+[Full changelog →](https://www.attrs.org/en/stable/changelog.html)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/RECORD
new file mode 100644
index 0000000..2a91231
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/RECORD
@@ -0,0 +1,56 @@
+attr/__init__.py,sha256=l8Ewh5KZE7CCY0i1iDfSCnFiUTIkBVoqsXjX9EZnIVA,2087
+attr/__init__.pyi,sha256=aTVHBPX6krCGvbQvOl_UKqEzmi2HFsaIVm2WKmAiqVs,11434
+attr/__pycache__/__init__.cpython-311.pyc,,
+attr/__pycache__/_cmp.cpython-311.pyc,,
+attr/__pycache__/_compat.cpython-311.pyc,,
+attr/__pycache__/_config.cpython-311.pyc,,
+attr/__pycache__/_funcs.cpython-311.pyc,,
+attr/__pycache__/_make.cpython-311.pyc,,
+attr/__pycache__/_next_gen.cpython-311.pyc,,
+attr/__pycache__/_version_info.cpython-311.pyc,,
+attr/__pycache__/converters.cpython-311.pyc,,
+attr/__pycache__/exceptions.cpython-311.pyc,,
+attr/__pycache__/filters.cpython-311.pyc,,
+attr/__pycache__/setters.cpython-311.pyc,,
+attr/__pycache__/validators.cpython-311.pyc,,
+attr/_cmp.py,sha256=3umHiBtgsEYtvNP_8XrQwTCdFoZIX4DEur76N-2a3X8,4123
+attr/_cmp.pyi,sha256=U-_RU_UZOyPUEQzXE6RMYQQcjkZRY25wTH99sN0s7MM,368
+attr/_compat.py,sha256=n2Uk3c-ywv0PkFfGlvqR7SzDXp4NOhWmNV_ZK6YfWoM,2958
+attr/_config.py,sha256=z81Vt-GeT_2taxs1XZfmHx9TWlSxjPb6eZH1LTGsS54,843
+attr/_funcs.py,sha256=SGDmNlED1TM3tgO9Ap2mfRfVI24XEAcrNQs7o2eBXHQ,17386
+attr/_make.py,sha256=BjENJz5eJoojJVbCoupWjXLLEZJ7VID89lisLbQUlmQ,91479
+attr/_next_gen.py,sha256=dhGb96VFg4kXBkS9Zdz1A2uxVJ99q_RT1hw3kLA9-uI,24630
+attr/_typing_compat.pyi,sha256=XDP54TUn-ZKhD62TOQebmzrwFyomhUCoGRpclb6alRA,469
+attr/_version_info.py,sha256=exSqb3b5E-fMSsgZAlEw9XcLpEgobPORCZpcaEglAM4,2121
+attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209
+attr/converters.py,sha256=vNa58pZi9V6uxBzl4t1QrHbQfkT4iRFAodyXe7lcgg0,3506
+attr/converters.pyi,sha256=mpDoVFO3Cpx8xYSSV0iZFl7IAHuoNBglxKfxHvLj_sY,410
+attr/exceptions.py,sha256=HRFq4iybmv7-DcZwyjl6M1euM2YeJVK_hFxuaBGAngI,1977
+attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539
+attr/filters.py,sha256=ZBiKWLp3R0LfCZsq7X11pn9WX8NslS2wXM4jsnLOGc8,1795
+attr/filters.pyi,sha256=3J5BG-dTxltBk1_-RuNRUHrv2qu1v8v4aDNAQ7_mifA,208
+attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+attr/setters.py,sha256=faMQeiBo_nbXYnPaQ1pq8PXeA7Zr-uNsVsPMiKCmxhc,1619
+attr/setters.pyi,sha256=NnVkaFU1BB4JB8E4JuXyrzTUgvtMpj8p3wBdJY7uix4,584
+attr/validators.py,sha256=985eTP6RHyon61YEauMJgyNy1rEOhJWiSXMJgRxPtrQ,20045
+attr/validators.pyi,sha256=LjKf7AoXZfvGSfT3LRs61Qfln94konYyMUPoJJjOxK4,2502
+attrs-24.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+attrs-24.2.0.dist-info/METADATA,sha256=3Jgk4lr9Y1SAqAcwOLPN_mpW0wc6VOGm-yHt1LsPIHw,11524
+attrs-24.2.0.dist-info/RECORD,,
+attrs-24.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+attrs-24.2.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
+attrs-24.2.0.dist-info/licenses/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109
+attrs/__init__.py,sha256=5FHo-EMFOX-g4ialSK4fwOjuoHzLISJDZCwoOl02Ty8,1071
+attrs/__init__.pyi,sha256=o3l92VsD9kHz8sldEtb_tllBTs3TeL-vIBMTxo2Zc_4,7703
+attrs/__pycache__/__init__.cpython-311.pyc,,
+attrs/__pycache__/converters.cpython-311.pyc,,
+attrs/__pycache__/exceptions.cpython-311.pyc,,
+attrs/__pycache__/filters.cpython-311.pyc,,
+attrs/__pycache__/setters.cpython-311.pyc,,
+attrs/__pycache__/validators.cpython-311.pyc,,
+attrs/converters.py,sha256=8kQljrVwfSTRu8INwEk8SI0eGrzmWftsT7rM0EqyohM,76
+attrs/exceptions.py,sha256=ACCCmg19-vDFaDPY9vFl199SPXCQMN_bENs4DALjzms,76
+attrs/filters.py,sha256=VOUMZug9uEU6dUuA0dF1jInUK0PL3fLgP0VBS5d-CDE,73
+attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+attrs/setters.py,sha256=eL1YidYQV3T2h9_SYIZSZR1FAcHGb1TuCTy0E0Lv2SU,73
+attrs/validators.py,sha256=xcy6wD5TtTkdCG1f4XWbocPSO0faBjk5IfVJfP6SUj0,76
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/WHEEL
new file mode 100644
index 0000000..cdd68a4
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: hatchling 1.25.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/licenses/LICENSE b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/licenses/LICENSE
new file mode 100644
index 0000000..2bd6453
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attrs-24.2.0.dist-info/licenses/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Hynek Schlawack and the attrs contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/attrs/__init__.py
new file mode 100644
index 0000000..963b197
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attrs/__init__.py
@@ -0,0 +1,67 @@
+# SPDX-License-Identifier: MIT
+
+from attr import (
+ NOTHING,
+ Attribute,
+ AttrsInstance,
+ Converter,
+ Factory,
+ _make_getattr,
+ assoc,
+ cmp_using,
+ define,
+ evolve,
+ field,
+ fields,
+ fields_dict,
+ frozen,
+ has,
+ make_class,
+ mutable,
+ resolve_types,
+ validate,
+)
+from attr._next_gen import asdict, astuple
+
+from . import converters, exceptions, filters, setters, validators
+
+
+__all__ = [
+ "__author__",
+ "__copyright__",
+ "__description__",
+ "__doc__",
+ "__email__",
+ "__license__",
+ "__title__",
+ "__url__",
+ "__version__",
+ "__version_info__",
+ "asdict",
+ "assoc",
+ "astuple",
+ "Attribute",
+ "AttrsInstance",
+ "cmp_using",
+ "Converter",
+ "converters",
+ "define",
+ "evolve",
+ "exceptions",
+ "Factory",
+ "field",
+ "fields_dict",
+ "fields",
+ "filters",
+ "frozen",
+ "has",
+ "make_class",
+ "mutable",
+ "NOTHING",
+ "resolve_types",
+ "setters",
+ "validate",
+ "validators",
+]
+
+__getattr__ = _make_getattr(__name__)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs/__init__.pyi b/lambdas/aws-dd-forwarder-3.127.0/attrs/__init__.pyi
new file mode 100644
index 0000000..b2670de
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attrs/__init__.pyi
@@ -0,0 +1,252 @@
+import sys
+
+from typing import (
+ Any,
+ Callable,
+ Mapping,
+ Sequence,
+ overload,
+ TypeVar,
+)
+
+# Because we need to type our own stuff, we have to make everything from
+# attr explicitly public too.
+from attr import __author__ as __author__
+from attr import __copyright__ as __copyright__
+from attr import __description__ as __description__
+from attr import __email__ as __email__
+from attr import __license__ as __license__
+from attr import __title__ as __title__
+from attr import __url__ as __url__
+from attr import __version__ as __version__
+from attr import __version_info__ as __version_info__
+from attr import assoc as assoc
+from attr import Attribute as Attribute
+from attr import AttrsInstance as AttrsInstance
+from attr import cmp_using as cmp_using
+from attr import converters as converters
+from attr import Converter as Converter
+from attr import evolve as evolve
+from attr import exceptions as exceptions
+from attr import Factory as Factory
+from attr import fields as fields
+from attr import fields_dict as fields_dict
+from attr import filters as filters
+from attr import has as has
+from attr import make_class as make_class
+from attr import NOTHING as NOTHING
+from attr import resolve_types as resolve_types
+from attr import setters as setters
+from attr import validate as validate
+from attr import validators as validators
+from attr import attrib, asdict as asdict, astuple as astuple
+
+if sys.version_info >= (3, 11):
+ from typing import dataclass_transform
+else:
+ from typing_extensions import dataclass_transform
+
+_T = TypeVar("_T")
+_C = TypeVar("_C", bound=type)
+
+_EqOrderType = bool | Callable[[Any], Any]
+_ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any]
+_ConverterType = Callable[[Any], Any]
+_ReprType = Callable[[Any], str]
+_ReprArgType = bool | _ReprType
+_OnSetAttrType = Callable[[Any, "Attribute[Any]", Any], Any]
+_OnSetAttrArgType = _OnSetAttrType | list[_OnSetAttrType] | setters._NoOpType
+_FieldTransformer = Callable[
+ [type, list["Attribute[Any]"]], list["Attribute[Any]"]
+]
+# FIXME: in reality, if multiple validators are passed they must be in a list
+# or tuple, but those are invariant and so would prevent subtypes of
+# _ValidatorType from working when passed in a list or tuple.
+_ValidatorArgType = _ValidatorType[_T] | Sequence[_ValidatorType[_T]]
+
+@overload
+def field(
+ *,
+ default: None = ...,
+ validator: None = ...,
+ repr: _ReprArgType = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ converter: None = ...,
+ factory: None = ...,
+ kw_only: bool = ...,
+ eq: bool | None = ...,
+ order: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+ type: type | None = ...,
+) -> Any: ...
+
+# This form catches an explicit None or no default and infers the type from the
+# other arguments.
+@overload
+def field(
+ *,
+ default: None = ...,
+ validator: _ValidatorArgType[_T] | None = ...,
+ repr: _ReprArgType = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ converter: _ConverterType | Converter[Any, _T] | None = ...,
+ factory: Callable[[], _T] | None = ...,
+ kw_only: bool = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+ type: type | None = ...,
+) -> _T: ...
+
+# This form catches an explicit default argument.
+@overload
+def field(
+ *,
+ default: _T,
+ validator: _ValidatorArgType[_T] | None = ...,
+ repr: _ReprArgType = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ converter: _ConverterType | Converter[Any, _T] | None = ...,
+ factory: Callable[[], _T] | None = ...,
+ kw_only: bool = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+ type: type | None = ...,
+) -> _T: ...
+
+# This form covers type=non-Type: e.g. forward references (str), Any
+@overload
+def field(
+ *,
+ default: _T | None = ...,
+ validator: _ValidatorArgType[_T] | None = ...,
+ repr: _ReprArgType = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ metadata: Mapping[Any, Any] | None = ...,
+ converter: _ConverterType | Converter[Any, _T] | None = ...,
+ factory: Callable[[], _T] | None = ...,
+ kw_only: bool = ...,
+ eq: _EqOrderType | None = ...,
+ order: _EqOrderType | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ alias: str | None = ...,
+ type: type | None = ...,
+) -> Any: ...
+@overload
+@dataclass_transform(field_specifiers=(attrib, field))
+def define(
+ maybe_cls: _C,
+ *,
+ these: dict[str, Any] | None = ...,
+ repr: bool = ...,
+ unsafe_hash: bool | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: bool | None = ...,
+ order: bool | None = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+ match_args: bool = ...,
+) -> _C: ...
+@overload
+@dataclass_transform(field_specifiers=(attrib, field))
+def define(
+ maybe_cls: None = ...,
+ *,
+ these: dict[str, Any] | None = ...,
+ repr: bool = ...,
+ unsafe_hash: bool | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: bool | None = ...,
+ order: bool | None = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+ match_args: bool = ...,
+) -> Callable[[_C], _C]: ...
+
+mutable = define
+
+@overload
+@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field))
+def frozen(
+ maybe_cls: _C,
+ *,
+ these: dict[str, Any] | None = ...,
+ repr: bool = ...,
+ unsafe_hash: bool | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: bool | None = ...,
+ order: bool | None = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+ match_args: bool = ...,
+) -> _C: ...
+@overload
+@dataclass_transform(frozen_default=True, field_specifiers=(attrib, field))
+def frozen(
+ maybe_cls: None = ...,
+ *,
+ these: dict[str, Any] | None = ...,
+ repr: bool = ...,
+ unsafe_hash: bool | None = ...,
+ hash: bool | None = ...,
+ init: bool = ...,
+ slots: bool = ...,
+ frozen: bool = ...,
+ weakref_slot: bool = ...,
+ str: bool = ...,
+ auto_attribs: bool = ...,
+ kw_only: bool = ...,
+ cache_hash: bool = ...,
+ auto_exc: bool = ...,
+ eq: bool | None = ...,
+ order: bool | None = ...,
+ auto_detect: bool = ...,
+ getstate_setstate: bool | None = ...,
+ on_setattr: _OnSetAttrArgType | None = ...,
+ field_transformer: _FieldTransformer | None = ...,
+ match_args: bool = ...,
+) -> Callable[[_C], _C]: ...
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs/converters.py b/lambdas/aws-dd-forwarder-3.127.0/attrs/converters.py
new file mode 100644
index 0000000..7821f6c
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attrs/converters.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.converters import * # noqa: F403
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs/exceptions.py b/lambdas/aws-dd-forwarder-3.127.0/attrs/exceptions.py
new file mode 100644
index 0000000..3323f9d
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attrs/exceptions.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.exceptions import * # noqa: F403
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs/filters.py b/lambdas/aws-dd-forwarder-3.127.0/attrs/filters.py
new file mode 100644
index 0000000..3080f48
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attrs/filters.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.filters import * # noqa: F403
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs/py.typed b/lambdas/aws-dd-forwarder-3.127.0/attrs/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs/setters.py b/lambdas/aws-dd-forwarder-3.127.0/attrs/setters.py
new file mode 100644
index 0000000..f3d73bb
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attrs/setters.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.setters import * # noqa: F403
diff --git a/lambdas/aws-dd-forwarder-3.127.0/attrs/validators.py b/lambdas/aws-dd-forwarder-3.127.0/attrs/validators.py
new file mode 100644
index 0000000..037e124
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/attrs/validators.py
@@ -0,0 +1,3 @@
+# SPDX-License-Identifier: MIT
+
+from attr.validators import * # noqa: F403
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bin/ddtrace-run b/lambdas/aws-dd-forwarder-3.127.0/bin/ddtrace-run
new file mode 100755
index 0000000..0cc0787
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bin/ddtrace-run
@@ -0,0 +1,8 @@
+#!/usr/local/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from ddtrace.commands.ddtrace_run import main
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(main())
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bin/dog b/lambdas/aws-dd-forwarder-3.127.0/bin/dog
new file mode 100755
index 0000000..7111893
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bin/dog
@@ -0,0 +1,8 @@
+#!/usr/local/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from datadog.dogshell import main
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(main())
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bin/dogshell b/lambdas/aws-dd-forwarder-3.127.0/bin/dogshell
new file mode 100755
index 0000000..7111893
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bin/dogshell
@@ -0,0 +1,8 @@
+#!/usr/local/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from datadog.dogshell import main
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(main())
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bin/dogshellwrap b/lambdas/aws-dd-forwarder-3.127.0/bin/dogshellwrap
new file mode 100755
index 0000000..5be900e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bin/dogshellwrap
@@ -0,0 +1,8 @@
+#!/usr/local/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from datadog.dogshell.wrap import main
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(main())
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bin/dogwrap b/lambdas/aws-dd-forwarder-3.127.0/bin/dogwrap
new file mode 100755
index 0000000..5be900e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bin/dogwrap
@@ -0,0 +1,8 @@
+#!/usr/local/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from datadog.dogshell.wrap import main
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(main())
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bin/normalizer b/lambdas/aws-dd-forwarder-3.127.0/bin/normalizer
new file mode 100755
index 0000000..aae4757
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bin/normalizer
@@ -0,0 +1,8 @@
+#!/usr/local/bin/python3.11
+# -*- coding: utf-8 -*-
+import re
+import sys
+from charset_normalizer.cli import cli_detect
+if __name__ == '__main__':
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+ sys.exit(cli_detect())
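The `bin/` scripts above all follow the stub pattern pip generates for console entry points: strip any Windows launcher suffix from `argv[0]`, then exit with the entry point's return code. A hand-written equivalent for a hypothetical `mytool = mypkg.cli:main` entry point (all names here are placeholders) would be:

    #!/usr/bin/env python3
    import re
    import sys

    from mypkg.cli import main  # hypothetical entry point

    if __name__ == "__main__":
        # Normalize argv[0] so it matches the command name on Windows too.
        sys.argv[0] = re.sub(r"(-script\.pyw|\.exe)?$", "", sys.argv[0])
        sys.exit(main())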
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/COPYING b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/COPYING
new file mode 100644
index 0000000..ba5a523
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/COPYING
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+Copyright Contributors to the bytecode project.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be included
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/METADATA
new file mode 100644
index 0000000..19faf45
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/METADATA
@@ -0,0 +1,102 @@
+Metadata-Version: 2.1
+Name: bytecode
+Version: 0.15.1
+Summary: Python module to generate and modify bytecode
+Author-email: Victor Stinner
+Maintainer-email: "Matthieu C. Dartiailh"
+License: The MIT License (MIT)
+ Copyright Contributors to the bytecode project.
+
+ Permission is hereby granted, free of charge, to any person obtaining a
+ copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be included
+ in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+ OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+Project-URL: homepage, https://github.com/MatthieuDartiailh/bytecode
+Project-URL: documentation, https://bytecode.readthedocs.io/en/latest/
+Project-URL: repository, https://github.com/MatthieuDartiailh/bytecode
+Project-URL: changelog, https://github.com/MatthieuDartiailh/bytecode/blob/main/doc/changelog.rst
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: COPYING
+Requires-Dist: typing-extensions ; python_version < "3.10"
+
+********
+bytecode
+********
+
+.. image:: https://img.shields.io/pypi/v/bytecode.svg
+ :alt: Latest release on the Python Cheeseshop (PyPI)
+ :target: https://pypi.python.org/pypi/bytecode
+
+.. image:: https://github.com/MatthieuDartiailh/bytecode/workflows/Continuous%20Integration/badge.svg
+ :target: https://github.com/MatthieuDartiailh/bytecode/actions
+ :alt: Continuous integration
+
+.. image:: https://github.com/MatthieuDartiailh/bytecode/workflows/Documentation%20building/badge.svg
+ :target: https://github.com/MatthieuDartiailh/bytecode/actions
+ :alt: Documentation building
+
+.. image:: https://img.shields.io/codecov/c/github/MatthieuDartiailh/bytecode/master.svg
+ :alt: Code coverage of bytecode on codecov.io
+ :target: https://codecov.io/github/MatthieuDartiailh/bytecode
+
+.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
+ :alt: Code formatted using Black
+ :target: https://github.com/psf/black
+
+``bytecode`` is a Python module to generate and modify bytecode.
+
+* `bytecode project homepage at GitHub
+  <https://github.com/MatthieuDartiailh/bytecode>`_ (code, bugs)
+* `bytecode documentation
+  <https://bytecode.readthedocs.io/en/latest/>`_
+* `Download latest bytecode release at the Python Cheeseshop (PyPI)
+  <https://pypi.python.org/pypi/bytecode>`_
+
+Install bytecode: ``python3 -m pip install bytecode``. It requires Python 3.8
+or newer. The latest release that supports Python 3.7 and 3.6 is 0.13.0.
+The latest release that supports Python 3.5 is 0.12.0. For Python 2.7 support,
+have a look at `dead-bytecode <https://github.com/vstinner/dead-bytecode>`_
+instead.
+
+Example executing ``print('Hello World!')``:
+
+.. code:: python
+
+ from bytecode import Instr, Bytecode
+
+ bytecode = Bytecode([Instr("LOAD_NAME", 'print'),
+ Instr("LOAD_CONST", 'Hello World!'),
+ Instr("CALL_FUNCTION", 1),
+ Instr("POP_TOP"),
+ Instr("LOAD_CONST", None),
+ Instr("RETURN_VALUE")])
+ code = bytecode.to_code()
+ exec(code)
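The README example above targets pre-3.11 opcodes (`CALL_FUNCTION` no longer exists in CPython 3.11, the interpreter these bundled scripts pin). A version-agnostic way to exercise the same API is to round-trip an existing function instead of hand-assembling instructions — a sketch, not part of the packaged metadata:

    from bytecode import Bytecode

    def greet():
        print("Hello World!")

    bc = Bytecode.from_code(greet.__code__)
    for instr in bc:               # abstract Instr objects, labels, etc.
        print(instr)
    greet.__code__ = bc.to_code()  # recompile; stack size is recomputed
    greet()                        # prints: Hello World!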
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/RECORD
new file mode 100644
index 0000000..49b55cf
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/RECORD
@@ -0,0 +1,22 @@
+bytecode-0.15.1.dist-info/COPYING,sha256=15CDvwHVcioF_s6S_mWdkWdw96tvB21WZKc8jvc8N5M,1094
+bytecode-0.15.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+bytecode-0.15.1.dist-info/METADATA,sha256=btrMOPa27_H0V6neBiLPJiunLrto9ukEE-PWoTtFGvM,4627
+bytecode-0.15.1.dist-info/RECORD,,
+bytecode-0.15.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+bytecode-0.15.1.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
+bytecode-0.15.1.dist-info/top_level.txt,sha256=9BhdB7HqYZ-PvHNoWX6ilwLYWQqcgEOLwdb3aXm5Gys,9
+bytecode/__init__.py,sha256=lsE6qqd_1wYjGq6s3q1Rhz1AyAjf98F4iJSrfg01F3k,6919
+bytecode/__pycache__/__init__.cpython-311.pyc,,
+bytecode/__pycache__/bytecode.cpython-311.pyc,,
+bytecode/__pycache__/cfg.cpython-311.pyc,,
+bytecode/__pycache__/concrete.cpython-311.pyc,,
+bytecode/__pycache__/flags.cpython-311.pyc,,
+bytecode/__pycache__/instr.cpython-311.pyc,,
+bytecode/__pycache__/version.cpython-311.pyc,,
+bytecode/bytecode.py,sha256=6oveflTRGnrzTQEP9Z0tp6ySwmXQ_DXIibdAGOZt5lY,11126
+bytecode/cfg.py,sha256=J0FOZD1n-LbPLGmPRggmj_1SxWZvcQQbuXeUDskRDv8,41785
+bytecode/concrete.py,sha256=NVsAef1Ya5MvhZfx0xKclP4eearg7vAixY2RpHtQFhk,52168
+bytecode/flags.py,sha256=eY4nrTIDkOBYswI-wXQ-p3mKfriH7pUNYaDien4OI6g,6189
+bytecode/instr.py,sha256=2fynmuZq46eXDyzIMS1e3wzGpXnm7BuY7rHGSsFkh7U,26777
+bytecode/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+bytecode/version.py,sha256=kz4YxQj6evqzVm2eaPEN9t8SwhJI1_YkLx-G2dMjhoI,519
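Each RECORD row is `path,hash,size`; the hash is a PEP 376/627-style sha256 digest in unpadded URL-safe base64 (the `.pyc` rows are left blank because those files are generated at install time). A small sketch of how such a digest could be recomputed for verification:

    import base64
    import hashlib

    def record_digest(path: str) -> str:
        with open(path, "rb") as f:
            digest = hashlib.sha256(f.read()).digest()
        return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode()

    # e.g. record_digest("bytecode/py.typed") matches the empty-file hash above.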
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/WHEEL
new file mode 100644
index 0000000..7e68873
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.41.2)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/top_level.txt b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/top_level.txt
new file mode 100644
index 0000000..b37707e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode-0.15.1.dist-info/top_level.txt
@@ -0,0 +1 @@
+bytecode
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/bytecode/__init__.py
new file mode 100644
index 0000000..11eb7d6
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode/__init__.py
@@ -0,0 +1,218 @@
+__all__ = [
+ "Label",
+ "Instr",
+ "SetLineno",
+ "Bytecode",
+ "ConcreteInstr",
+ "ConcreteBytecode",
+ "ControlFlowGraph",
+ "CompilerFlags",
+ "Compare",
+ "BinaryOp",
+ "__version__",
+]
+
+from io import StringIO
+from typing import List, Union
+
+# import needed to use it in bytecode.py
+from bytecode.bytecode import ( # noqa
+ BaseBytecode,
+ Bytecode,
+ _BaseBytecodeList,
+ _InstrList,
+)
+
+# import needed to use it in bytecode.py
+from bytecode.cfg import BasicBlock, ControlFlowGraph # noqa
+
+# import needed to use it in bytecode.py
+from bytecode.concrete import _ConvertBytecodeToConcrete # noqa
+from bytecode.concrete import ConcreteBytecode, ConcreteInstr
+from bytecode.flags import CompilerFlags
+
+# import needed to use it in bytecode.py
+from bytecode.instr import ( # noqa
+ UNSET,
+ BinaryOp,
+ CellVar,
+ Compare,
+ FreeVar,
+ Instr,
+ Intrinsic1Op,
+ Intrinsic2Op,
+ Label,
+ SetLineno,
+ TryBegin,
+ TryEnd,
+)
+from bytecode.version import __version__
+
+
+def format_bytecode(
+ bytecode: Union[Bytecode, ConcreteBytecode, ControlFlowGraph],
+ *,
+ lineno: bool = False,
+) -> str:
+ try_begins: List[TryBegin] = []
+
+ def format_line(index, line):
+ nonlocal cur_lineno, prev_lineno
+ if lineno:
+ if cur_lineno != prev_lineno:
+ line = "L.% 3s % 3s: %s" % (cur_lineno, index, line)
+ prev_lineno = cur_lineno
+ else:
+ line = " % 3s: %s" % (index, line)
+ else:
+ line = line
+ return line
+
+ def format_instr(instr, labels=None):
+ text = instr.name
+ arg = instr._arg
+ if arg is not UNSET:
+ if isinstance(arg, Label):
+ try:
+ arg = "<%s>" % labels[arg]
+ except KeyError:
+                    arg = "<error>"
+ elif isinstance(arg, BasicBlock):
+ try:
+ arg = "<%s>" % labels[id(arg)]
+ except KeyError:
+                    arg = "<error>"
+ else:
+ arg = repr(arg)
+ text = "%s %s" % (text, arg)
+ return text
+
+ def format_try_begin(instr: TryBegin, labels: dict) -> str:
+ if isinstance(instr.target, Label):
+ try:
+ arg = "<%s>" % labels[instr.target]
+ except KeyError:
+                arg = "<error>"
+ else:
+ try:
+ arg = "<%s>" % labels[id(instr.target)]
+ except KeyError:
+                arg = "<error>"
+ line = "TryBegin %s -> %s [%s]" % (
+ len(try_begins),
+ arg,
+ instr.stack_depth,
+ ) + (" last_i" if instr.push_lasti else "")
+
+ # Track the seen try begin
+ try_begins.append(instr)
+
+ return line
+
+ def format_try_end(instr: TryEnd) -> str:
+ i = try_begins.index(instr.entry) if instr.entry in try_begins else ""
+ return "TryEnd (%s)" % i
+
+ buffer = StringIO()
+
+ indent = " " * 4
+
+ cur_lineno = bytecode.first_lineno
+ prev_lineno = None
+
+ if isinstance(bytecode, ConcreteBytecode):
+ offset = 0
+ for c_instr in bytecode:
+ fields = []
+ if c_instr.lineno is not None:
+ cur_lineno = c_instr.lineno
+ if lineno:
+ fields.append(format_instr(c_instr))
+ line = "".join(fields)
+ line = format_line(offset, line)
+ else:
+ fields.append("% 3s %s" % (offset, format_instr(c_instr)))
+ line = "".join(fields)
+ buffer.write(line + "\n")
+
+ if isinstance(c_instr, ConcreteInstr):
+ offset += c_instr.size
+
+ if bytecode.exception_table:
+ buffer.write("\n")
+ buffer.write("Exception table:\n")
+ for entry in bytecode.exception_table:
+ buffer.write(
+ f"{entry.start_offset} to {entry.stop_offset} -> "
+ f"{entry.target} [{entry.stack_depth}]"
+ + (" lasti" if entry.push_lasti else "")
+ + "\n"
+ )
+
+ elif isinstance(bytecode, Bytecode):
+ labels: dict[Label, str] = {}
+ for index, instr in enumerate(bytecode):
+ if isinstance(instr, Label):
+ labels[instr] = "label_instr%s" % index
+
+ for index, instr in enumerate(bytecode):
+ if isinstance(instr, Label):
+ label = labels[instr]
+ line = "%s:" % label
+ if index != 0:
+ buffer.write("\n")
+ elif isinstance(instr, TryBegin):
+ line = indent + format_line(index, format_try_begin(instr, labels))
+ indent += " "
+ elif isinstance(instr, TryEnd):
+ indent = indent[:-2]
+ line = indent + format_line(index, format_try_end(instr))
+ else:
+ if instr.lineno is not None:
+ cur_lineno = instr.lineno
+ line = format_instr(instr, labels)
+ line = indent + format_line(index, line)
+ buffer.write(line + "\n")
+ buffer.write("\n")
+
+ elif isinstance(bytecode, ControlFlowGraph):
+ cfg_labels = {}
+ for block_index, block in enumerate(bytecode, 1):
+ cfg_labels[id(block)] = "block%s" % block_index
+
+ for block_index, block in enumerate(bytecode, 1):
+ buffer.write("%s:\n" % cfg_labels[id(block)])
+ seen_instr = False
+ for index, instr in enumerate(block):
+ if isinstance(instr, TryBegin):
+ line = indent + format_line(
+ index, format_try_begin(instr, cfg_labels)
+ )
+ indent += " "
+ elif isinstance(instr, TryEnd):
+ if seen_instr:
+ indent = indent[:-2]
+ line = indent + format_line(index, format_try_end(instr))
+ else:
+ if isinstance(instr, Instr):
+ seen_instr = True
+ if instr.lineno is not None:
+ cur_lineno = instr.lineno
+ line = format_instr(instr, cfg_labels)
+ line = indent + format_line(index, line)
+ buffer.write(line + "\n")
+ if block.next_block is not None:
+ buffer.write(indent + "-> %s\n" % cfg_labels[id(block.next_block)])
+ buffer.write("\n")
+ else:
+ raise TypeError("unknown bytecode class")
+
+ return buffer.getvalue()[:-1]
+
+
+def dump_bytecode(
+ bytecode: Union[Bytecode, ConcreteBytecode, ControlFlowGraph],
+ *,
+ lineno: bool = False,
+) -> None:
+ print(format_bytecode(bytecode, lineno=lineno))
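`dump_bytecode` is a thin wrapper over `format_bytecode`, which accepts any of the three representations handled above (Bytecode, ConcreteBytecode, ControlFlowGraph). An illustrative call, not part of the vendored file:

    from bytecode import Bytecode, dump_bytecode

    def add_one(x):
        return x + 1

    # Print the abstract instructions, prefixed with source line numbers.
    dump_bytecode(Bytecode.from_code(add_one.__code__), lineno=True)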
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode/bytecode.py b/lambdas/aws-dd-forwarder-3.127.0/bytecode/bytecode.py
new file mode 100644
index 0000000..149bb37
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode/bytecode.py
@@ -0,0 +1,330 @@
+# alias to keep the 'bytecode' variable free
+import sys
+import types
+from abc import abstractmethod
+from typing import (
+ Any,
+ Dict,
+ Generic,
+ Iterator,
+ List,
+ Optional,
+ Sequence,
+ SupportsIndex,
+ TypeVar,
+ Union,
+ overload,
+)
+
+import bytecode as _bytecode
+from bytecode.flags import CompilerFlags, infer_flags
+from bytecode.instr import (
+ _UNSET,
+ UNSET,
+ BaseInstr,
+ Instr,
+ Label,
+ SetLineno,
+ TryBegin,
+ TryEnd,
+)
+
+
+class BaseBytecode:
+ def __init__(self) -> None:
+ self.argcount = 0
+ self.posonlyargcount = 0
+ self.kwonlyargcount = 0
+ self.first_lineno = 1
+ self.name = ""
+ self.qualname = self.name
+ self.filename = ""
+ self.docstring: Union[str, None, _UNSET] = UNSET
+        # We cannot recreate cellvars/freevars from instructions because of the
+        # super() special case, which involves an implicit __class__ cell/free
+        # variable. We could try to detect it, but CPython itself breaks if one
+        # aliases super, so making this work would require careful design and
+        # is left for the future.
+ self.cellvars: List[str] = []
+ self.freevars: List[str] = []
+ self._flags: CompilerFlags = CompilerFlags(0)
+
+ def _copy_attr_from(self, bytecode: "BaseBytecode") -> None:
+ self.argcount = bytecode.argcount
+ self.posonlyargcount = bytecode.posonlyargcount
+ self.kwonlyargcount = bytecode.kwonlyargcount
+ self.flags = bytecode.flags
+ self.first_lineno = bytecode.first_lineno
+ self.name = bytecode.name
+ self.qualname = bytecode.qualname
+ self.filename = bytecode.filename
+ self.docstring = bytecode.docstring
+ self.cellvars = list(bytecode.cellvars)
+ self.freevars = list(bytecode.freevars)
+
+ def __eq__(self, other: Any) -> bool:
+ if type(self) is not type(other):
+ return False
+
+ if self.argcount != other.argcount:
+ return False
+ if self.posonlyargcount != other.posonlyargcount:
+ return False
+ if self.kwonlyargcount != other.kwonlyargcount:
+ return False
+ if self.flags != other.flags:
+ return False
+ if self.first_lineno != other.first_lineno:
+ return False
+ if self.filename != other.filename:
+ return False
+ if self.name != other.name:
+ return False
+ if self.qualname != other.qualname:
+ return False
+ if self.docstring != other.docstring:
+ return False
+ if self.cellvars != other.cellvars:
+ return False
+ if self.freevars != other.freevars:
+ return False
+ if self.compute_stacksize() != other.compute_stacksize():
+ return False
+
+ return True
+
+ @property
+ def flags(self) -> CompilerFlags:
+ return self._flags
+
+ @flags.setter
+ def flags(self, value: CompilerFlags) -> None:
+ if not isinstance(value, CompilerFlags):
+ value = CompilerFlags(value)
+ self._flags = value
+
+ def update_flags(self, *, is_async: Optional[bool] = None) -> None:
+        # infer_flags reasonably only accepts concrete subclasses
+ self.flags = infer_flags(self, is_async) # type: ignore
+
+ @abstractmethod
+ def compute_stacksize(self, *, check_pre_and_post: bool = True) -> int:
+ raise NotImplementedError
+
+
+T = TypeVar("T", bound="_BaseBytecodeList")
+U = TypeVar("U")
+
+
+class _BaseBytecodeList(BaseBytecode, list, Generic[U]):
+ """List subclass providing type stable slicing and copying."""
+
+ @overload
+ def __getitem__(self, index: SupportsIndex) -> U:
+ ...
+
+ @overload
+ def __getitem__(self: T, index: slice) -> T:
+ ...
+
+ def __getitem__(self, index):
+ value = super().__getitem__(index)
+ if isinstance(index, slice):
+ value = type(self)(value)
+ value._copy_attr_from(self)
+
+ return value
+
+ def copy(self: T) -> T:
+ # This is a list subclass and works
+ new = type(self)(super().copy()) # type: ignore
+ new._copy_attr_from(self)
+ return new
+
+ def legalize(self) -> None:
+        """Check that all the elements of the list are valid and remove SetLineno."""
+ lineno_pos = []
+ set_lineno = None
+ current_lineno = self.first_lineno
+
+ for pos, instr in enumerate(self):
+ if isinstance(instr, SetLineno):
+ set_lineno = instr.lineno
+ lineno_pos.append(pos)
+ continue
+ # Filter out other pseudo instructions
+ if not isinstance(instr, BaseInstr):
+ continue
+ if set_lineno is not None:
+ instr.lineno = set_lineno
+ elif instr.lineno is UNSET:
+ instr.lineno = current_lineno
+ elif instr.lineno is not None:
+ current_lineno = instr.lineno
+
+ for i in reversed(lineno_pos):
+ del self[i]
+
+ def __iter__(self) -> Iterator[U]:
+ instructions = super().__iter__()
+ for instr in instructions:
+ self._check_instr(instr)
+ yield instr
+
+ def _check_instr(self, instr):
+ raise NotImplementedError()
+
+
+V = TypeVar("V")
+
+
+class _InstrList(List[V]):
+ # Providing a stricter typing for this helper whose use is limited to the __eq__
+ # implementation is more effort than it is worth.
+ def _flat(self) -> List:
+ instructions: List = []
+ labels = {}
+ jumps = []
+ try_begins: Dict[TryBegin, int] = {}
+ try_jumps = []
+
+ offset = 0
+ instr: Any
+ for index, instr in enumerate(self):
+ if isinstance(instr, Label):
+ instructions.append("label_instr%s" % index)
+ labels[instr] = offset
+ elif isinstance(instr, TryBegin):
+ try_begins.setdefault(instr, len(try_begins))
+ assert isinstance(instr.target, Label)
+ try_jumps.append((instr.target, len(instructions)))
+ instructions.append(instr)
+ elif isinstance(instr, TryEnd):
+ instructions.append(("TryEnd", try_begins[instr.entry]))
+ else:
+ if isinstance(instr, Instr) and isinstance(instr.arg, Label):
+ target_label = instr.arg
+ instr = _bytecode.ConcreteInstr(
+ instr.name, 0, location=instr.location
+ )
+ jumps.append((target_label, instr))
+ instructions.append(instr)
+ offset += 1
+
+ for target_label, instr in jumps:
+ instr.arg = labels[target_label]
+
+ for target_label, index in try_jumps:
+ instr = instructions[index]
+ assert isinstance(instr, TryBegin)
+ instructions[index] = (
+ "TryBegin",
+ try_begins[instr],
+ labels[target_label],
+ instr.push_lasti,
+ )
+
+ return instructions
+
+ def __eq__(self, other: Any) -> bool:
+ if not isinstance(other, _InstrList):
+ other = _InstrList(other)
+
+ return self._flat() == other._flat()
+
+
+class Bytecode(
+ _InstrList[Union[Instr, Label, TryBegin, TryEnd, SetLineno]],
+ _BaseBytecodeList[Union[Instr, Label, TryBegin, TryEnd, SetLineno]],
+):
+ def __init__(
+ self,
+ instructions: Sequence[Union[Instr, Label, TryBegin, TryEnd, SetLineno]] = (),
+ ) -> None:
+ BaseBytecode.__init__(self)
+ self.argnames: List[str] = []
+ for instr in instructions:
+ self._check_instr(instr)
+ self.extend(instructions)
+
+ def __iter__(self) -> Iterator[Union[Instr, Label, TryBegin, TryEnd, SetLineno]]:
+ instructions = super().__iter__()
+ seen_try_begin = False
+ for instr in instructions:
+ self._check_instr(instr)
+ if isinstance(instr, TryBegin):
+ if seen_try_begin:
+ raise RuntimeError("TryBegin pseudo instructions cannot be nested.")
+ seen_try_begin = True
+ elif isinstance(instr, TryEnd):
+ seen_try_begin = False
+ yield instr
+
+ def _check_instr(self, instr: Any) -> None:
+ if not isinstance(instr, (Label, SetLineno, Instr, TryBegin, TryEnd)):
+ raise ValueError(
+                "Bytecode must only contain Label, SetLineno, Instr, "
+                "TryBegin, and TryEnd objects, "
+                "but %s was found" % type(instr).__name__
+ )
+
+ def _copy_attr_from(self, bytecode: BaseBytecode) -> None:
+ super()._copy_attr_from(bytecode)
+ if isinstance(bytecode, Bytecode):
+ self.argnames = bytecode.argnames
+
+ @staticmethod
+ def from_code(
+ code: types.CodeType,
+ prune_caches: bool = True,
+ conserve_exception_block_stackdepth: bool = False,
+ ) -> "Bytecode":
+ concrete = _bytecode.ConcreteBytecode.from_code(code)
+ return concrete.to_bytecode(
+ prune_caches=prune_caches,
+ conserve_exception_block_stackdepth=conserve_exception_block_stackdepth,
+ )
+
+ def compute_stacksize(self, *, check_pre_and_post: bool = True) -> int:
+ cfg = _bytecode.ControlFlowGraph.from_bytecode(self)
+ return cfg.compute_stacksize(check_pre_and_post=check_pre_and_post)
+
+ def to_code(
+ self,
+ compute_jumps_passes: Optional[int] = None,
+ stacksize: Optional[int] = None,
+ *,
+ check_pre_and_post: bool = True,
+ compute_exception_stack_depths: bool = True,
+ ) -> types.CodeType:
+ # Prevent reconverting the concrete bytecode to bytecode and cfg to do the
+ # calculation if we need to do it.
+ if stacksize is None or (
+ sys.version_info >= (3, 11) and compute_exception_stack_depths
+ ):
+ cfg = _bytecode.ControlFlowGraph.from_bytecode(self)
+ stacksize = cfg.compute_stacksize(
+ check_pre_and_post=check_pre_and_post,
+ compute_exception_stack_depths=compute_exception_stack_depths,
+ )
+ self = cfg.to_bytecode()
+ compute_exception_stack_depths = False # avoid redoing everything
+ bc = self.to_concrete_bytecode(
+ compute_jumps_passes=compute_jumps_passes,
+ compute_exception_stack_depths=compute_exception_stack_depths,
+ )
+ return bc.to_code(
+ stacksize=stacksize,
+ compute_exception_stack_depths=compute_exception_stack_depths,
+ )
+
+ def to_concrete_bytecode(
+ self,
+ compute_jumps_passes: Optional[int] = None,
+ compute_exception_stack_depths: bool = True,
+ ) -> "_bytecode.ConcreteBytecode":
+ converter = _bytecode._ConvertBytecodeToConcrete(self)
+ return converter.to_concrete_bytecode(
+ compute_jumps_passes=compute_jumps_passes,
+ compute_exception_stack_depths=compute_exception_stack_depths,
+ )
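The class above supports a full decompile/patch/recompile cycle: `from_code` lifts a code object into editable `Instr` objects, and `to_code` recomputes the stack size (via the CFG) before emitting a new code object. A minimal round-trip using only the API defined in this file:

    from bytecode import Bytecode

    def answer():
        return 42

    bc = Bytecode.from_code(answer.__code__)
    answer.__code__ = bc.to_code()  # stacksize recomputed automatically
    assert answer() == 42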
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode/cfg.py b/lambdas/aws-dd-forwarder-3.127.0/bytecode/cfg.py
new file mode 100644
index 0000000..7f554fa
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode/cfg.py
@@ -0,0 +1,1061 @@
+import sys
+import types
+from collections import defaultdict
+from dataclasses import dataclass
+from typing import (
+ Any,
+ Dict,
+ Generator,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Set,
+ SupportsIndex,
+ Tuple,
+ TypeVar,
+ Union,
+ overload,
+)
+
+# alias to keep the 'bytecode' variable free
+import bytecode as _bytecode
+from bytecode.concrete import ConcreteInstr
+from bytecode.flags import CompilerFlags
+from bytecode.instr import UNSET, Instr, Label, SetLineno, TryBegin, TryEnd
+
+T = TypeVar("T", bound="BasicBlock")
+U = TypeVar("U", bound="ControlFlowGraph")
+
+
+class BasicBlock(_bytecode._InstrList[Union[Instr, SetLineno, TryBegin, TryEnd]]):
+ def __init__(
+ self,
+ instructions: Optional[
+ Iterable[Union[Instr, SetLineno, TryBegin, TryEnd]]
+ ] = None,
+ ) -> None:
+ # a BasicBlock object, or None
+ self.next_block: Optional["BasicBlock"] = None
+ if instructions:
+ super().__init__(instructions)
+
+ def __iter__(self) -> Iterator[Union[Instr, SetLineno, TryBegin, TryEnd]]:
+ index = 0
+ while index < len(self):
+ instr = self[index]
+ index += 1
+
+ if not isinstance(instr, (SetLineno, Instr, TryBegin, TryEnd)):
+ raise ValueError(
+                    "BasicBlock must only contain SetLineno, Instr, TryBegin, "
+                    "and TryEnd objects, but %s was found" % instr.__class__.__name__
+ )
+
+ if isinstance(instr, Instr) and instr.has_jump():
+ if index < len(self) and any(
+ isinstance(self[i], Instr) for i in range(index, len(self))
+ ):
+ raise ValueError(
+ "Only the last instruction of a basic " "block can be a jump"
+ )
+
+ if not isinstance(instr.arg, BasicBlock):
+ raise ValueError(
+                        "Jump target must be a BasicBlock, got %s"
+                        % type(instr.arg).__name__,
+ )
+
+ if isinstance(instr, TryBegin):
+ if not isinstance(instr.target, BasicBlock):
+ raise ValueError(
+                        "TryBegin target must be a BasicBlock, got %s"
+                        % type(instr.target).__name__,
+ )
+
+ yield instr
+
+ @overload
+ def __getitem__(
+ self, index: SupportsIndex
+ ) -> Union[Instr, SetLineno, TryBegin, TryEnd]:
+ ...
+
+ @overload
+ def __getitem__(self: T, index: slice) -> T:
+ ...
+
+ def __getitem__(self, index):
+ value = super().__getitem__(index)
+ if isinstance(index, slice):
+ value = type(self)(value)
+ value.next_block = self.next_block
+
+ return value
+
+ def get_last_non_artificial_instruction(self) -> Optional[Instr]:
+ for instr in reversed(self):
+ if isinstance(instr, Instr):
+ return instr
+
+ return None
+
+ def copy(self: T) -> T:
+ new = type(self)(super().copy())
+ new.next_block = self.next_block
+ return new
+
+ def legalize(self, first_lineno: int) -> int:
+        """Check that all the elements of the list are valid and remove SetLineno."""
+ lineno_pos = []
+ set_lineno = None
+ current_lineno = first_lineno
+
+ for pos, instr in enumerate(self):
+ if isinstance(instr, SetLineno):
+ set_lineno = current_lineno = instr.lineno
+ lineno_pos.append(pos)
+ continue
+ if isinstance(instr, (TryBegin, TryEnd)):
+ continue
+
+ if set_lineno is not None:
+ instr.lineno = set_lineno
+ elif instr.lineno is UNSET:
+ instr.lineno = current_lineno
+ elif instr.lineno is not None:
+ current_lineno = instr.lineno
+
+ for i in reversed(lineno_pos):
+ del self[i]
+
+ return current_lineno
+
+ def get_jump(self) -> Optional["BasicBlock"]:
+ if not self:
+ return None
+
+ last_instr = self.get_last_non_artificial_instruction()
+ if last_instr is None or not last_instr.has_jump():
+ return None
+
+ target_block = last_instr.arg
+ assert isinstance(target_block, BasicBlock)
+ return target_block
+
+ def get_trailing_try_end(self, index: int):
+ while index + 1 < len(self):
+ if isinstance(b := self[index + 1], TryEnd):
+ return b
+ index += 1
+
+ return None
+
+
+def _update_size(pre_delta, post_delta, size, maxsize, minsize):
+ size += pre_delta
+ if size < 0:
+ msg = "Failed to compute stacksize, got negative size"
+ raise RuntimeError(msg)
+ size += post_delta
+ maxsize = max(maxsize, size)
+ minsize = min(minsize, size)
+ return size, maxsize, minsize
+
+
+# We can never have nested TryBegin, so we can simply update the min stack size
+# when we encounter one and use the number we have when we encounter the TryEnd
+
+
+@dataclass
+class _StackSizeComputationStorage:
+ """Common storage shared by the computers involved in computing CFG stack usage."""
+
+    #: Should we check that all stack operations are "safe", i.e. occur while
+    #: there is a sufficient number of items on the stack.
+ check_pre_and_post: bool
+
+    #: Ids of the blocks for which an analysis is in progress, to avoid getting
+    #: stuck in recursion.
+ seen_blocks: Set[int]
+
+    #: Sizes and exception handling statuses with which the analysis of a block
+    #: has been performed. Used to avoid re-running equivalent analyses.
+ blocks_startsizes: Dict[int, Set[Tuple[int, Optional[bool]]]]
+
+ #: Track the encountered TryBegin pseudo-instruction to update their target
+ #: depth at the end of the calculation.
+ try_begins: List[TryBegin]
+
+ #: Stacksize that should be used for exception blocks. This is the smallest size
+ #: with which this block was reached which is the only size that can be safely
+ #: restored.
+ exception_block_startsize: Dict[int, int]
+
+ #: Largest stack size used in an exception block. We record the size corresponding
+ #: to the smallest start size for the block since the interpreter enforces that
+ #: we start with this size.
+ exception_block_maxsize: Dict[int, int]
+
+
+class _StackSizeComputer:
+ """Helper computing the stack usage for a single block."""
+
+ #: Common storage shared by all helpers involved in the stack size computation
+ common: _StackSizeComputationStorage
+
+ #: Block this helper is running the computation for.
+ block: BasicBlock
+
+ #: Current stack usage.
+ size: int
+
+ #: Maximal stack usage.
+ maxsize: int
+
+ #: Minimal stack usage. This value is only relevant in between a TryBegin/TryEnd
+    #: pair and determines the startsize for the exception handling block
+    #: associated with the TryBegin.
+ minsize: int
+
+ #: Flag indicating if the block analyzed is an exception handler (i.e. a target
+ #: of a TryBegin).
+ exception_handler: Optional[bool]
+
+ #: TryBegin that was encountered before jumping to this block and for which
+ #: no try end was met yet.
+ pending_try_begin: Optional[TryBegin]
+
+ def __init__(
+ self,
+ common: _StackSizeComputationStorage,
+ block: BasicBlock,
+ size: int,
+ maxsize: int,
+ minsize: int,
+ exception_handler: Optional[bool],
+ pending_try_begin: Optional[TryBegin],
+ ) -> None:
+ self.common = common
+ self.block = block
+ self.size = size
+ self.maxsize = maxsize
+ self.minsize = minsize
+ self.exception_handler = exception_handler
+ self.pending_try_begin = pending_try_begin
+ self._current_try_begin = pending_try_begin
+
+ def run(self) -> Generator[Union["_StackSizeComputer", int], int, None]:
+ """Iterate over the block instructions to compute stack usage."""
+ # Blocks are not hashable but in this particular context we know we won't be
+ # modifying blocks in place so we can safely use their id as hash rather than
+ # making them generally hashable which would be weird since they are list
+ # subclasses
+ block_id = id(self.block)
+
+ # If the block is currently being visited (seen = True) or
+        # it was visited previously with parameters that make the computation
+        # irrelevant, return the maxsize.
+ fingerprint = (self.size, self.exception_handler)
+ if id(self.block) in self.common.seen_blocks or (
+ not self._is_stacksize_computation_relevant(block_id, fingerprint)
+ ):
+ yield self.maxsize
+
+ # Prevent recursive visit of block if two blocks are nested (jump from one
+ # to the other).
+ self.common.seen_blocks.add(block_id)
+
+        # Track which size has been used to run an analysis to avoid re-running
+        # the same calculation multiple times.
+ self.common.blocks_startsizes[block_id].add(fingerprint)
+
+ # If this block is an exception handler reached through the exception table
+ # we will push some extra objects on the stack before processing start.
+ if self.exception_handler is not None:
+ self._update_size(0, 1 + self.exception_handler)
+            # True is used to indicate that push_lasti is True, leading to pushing
+ # an extra object on the stack.
+
+ for i, instr in enumerate(self.block):
+ # Ignore SetLineno
+ if isinstance(instr, (SetLineno)):
+ continue
+
+ # When we encounter a TryBegin, we:
+ # - store it as the current TryBegin (since TryBegin cannot be nested)
+ # - record its existence to remember to update its stack size when
+ # the computation ends
+ # - update the minsize to the current size value since we need to
+ # know the minimal stack usage between the TryBegin/TryEnd pair to
+ # set the startsize of the exception handling block
+ #
+ # This approach does not require any special handling for with statements.
+ if isinstance(instr, TryBegin):
+ assert self._current_try_begin is None
+ self.common.try_begins.append(instr)
+ self._current_try_begin = instr
+ self.minsize = self.size
+
+ continue
+
+ elif isinstance(instr, TryEnd):
+ # When we encounter a TryEnd we can start the computation for the
+ # exception block using the minimum stack size encountered since
+ # the TryBegin matching this TryEnd.
+
+ # TryBegin cannot be nested so a TryEnd should always match the
+ # current try begin. However inside the CFG some blocks may
+ # start with a TryEnd relevant only when reaching this block
+ # through a particular jump. So we are lenient here.
+ if instr.entry is not self._current_try_begin:
+ continue
+
+ # Compute the stack usage of the exception handler
+ assert isinstance(instr.entry.target, BasicBlock)
+ yield from self._compute_exception_handler_stack_usage(
+ instr.entry.target,
+ instr.entry.push_lasti,
+ )
+ self._current_try_begin = None
+ continue
+
+ # For instructions with a jump first compute the stacksize required when the
+ # jump is taken.
+ if instr.has_jump():
+ effect = (
+ instr.pre_and_post_stack_effect(jump=True)
+ if self.common.check_pre_and_post
+ else (instr.stack_effect(jump=True), 0)
+ )
+ taken_size, maxsize, minsize = _update_size(
+ *effect, self.size, self.maxsize, self.minsize
+ )
+
+                # Yield the parameters required to compute the stacksize needed
+                # by the block to which the jump points, and resume when we know
+                # the maxsize.
+ assert isinstance(instr.arg, BasicBlock)
+ maxsize = yield _StackSizeComputer(
+ self.common,
+ instr.arg,
+ taken_size,
+ maxsize,
+ minsize,
+ None,
+ # Do not propagate the TryBegin if a final instruction is followed
+ # by a TryEnd.
+ None
+ if instr.is_final() and self.block.get_trailing_try_end(i)
+ else self._current_try_begin,
+ )
+
+                # Update the maximum used size with the usage implied by following
+                # the jump.
+ self.maxsize = max(self.maxsize, maxsize)
+
+ # For unconditional jumps abort early since the other instruction will
+ # never be seen.
+ if instr.is_uncond_jump():
+                    # Check for a TryEnd after the final instruction, which is
+                    # possible since TryEnds are only pseudo instructions.
+ if te := self.block.get_trailing_try_end(i):
+ # TryBegin cannot be nested
+ assert te.entry is self._current_try_begin
+
+ assert isinstance(te.entry.target, BasicBlock)
+ yield from self._compute_exception_handler_stack_usage(
+ te.entry.target,
+ te.entry.push_lasti,
+ )
+
+ self.common.seen_blocks.remove(id(self.block))
+ yield self.maxsize
+
+ # jump=False: non-taken path of jumps, or any non-jump
+ effect = (
+ instr.pre_and_post_stack_effect(jump=False)
+ if self.common.check_pre_and_post
+ else (instr.stack_effect(jump=False), 0)
+ )
+ self._update_size(*effect)
+
+ # Instruction is final (return, raise, ...) so any following instruction
+ # in the block is dead code.
+ if instr.is_final():
+            # Check for a TryEnd after the final instruction, which is possible
+            # since TryEnds are only pseudo instructions.
+ if te := self.block.get_trailing_try_end(i):
+ assert isinstance(te.entry.target, BasicBlock)
+ yield from self._compute_exception_handler_stack_usage(
+ te.entry.target,
+ te.entry.push_lasti,
+ )
+
+ self.common.seen_blocks.remove(id(self.block))
+
+ yield self.maxsize
+
+ if self.block.next_block:
+ self.maxsize = yield _StackSizeComputer(
+ self.common,
+ self.block.next_block,
+ self.size,
+ self.maxsize,
+ self.minsize,
+ None,
+ self._current_try_begin,
+ )
+
+ self.common.seen_blocks.remove(id(self.block))
+
+ yield self.maxsize
+
+ # --- Private API
+
+ _current_try_begin: Optional[TryBegin]
+
+ def _update_size(self, pre_delta: int, post_delta: int) -> None:
+ size, maxsize, minsize = _update_size(
+ pre_delta, post_delta, self.size, self.maxsize, self.minsize
+ )
+ self.size = size
+ self.minsize = minsize
+ self.maxsize = maxsize
+
+ def _compute_exception_handler_stack_usage(
+ self, block: BasicBlock, push_lasti: bool
+ ) -> Generator[Union["_StackSizeComputer", int], int, None]:
+ b_id = id(block)
+ if self.minsize < self.common.exception_block_startsize[b_id]:
+ block_size = yield _StackSizeComputer(
+ self.common,
+ block,
+ self.minsize,
+ self.maxsize,
+ self.minsize,
+ push_lasti,
+ None,
+ )
+ # The entry cannot be smaller than abs(stc.minimal_entry_size) as otherwise
+            # an underflow would have occurred.
+ self.common.exception_block_startsize[b_id] = self.minsize
+ self.common.exception_block_maxsize[b_id] = block_size
+
+ def _is_stacksize_computation_relevant(
+ self, block_id: int, fingerprint: Tuple[int, Optional[bool]]
+ ) -> bool:
+ if sys.version_info >= (3, 11):
+ # The computation is relevant if the block was not visited previously
+            # with the same starting size and exception handler status as the
+            # one in use
+ return fingerprint not in self.common.blocks_startsizes[block_id]
+ else:
+ # The computation is relevant if the block was only visited with smaller
+ # starting sizes than the one in use
+ if sizes := self.common.blocks_startsizes[block_id]:
+ return fingerprint[0] > max(f[0] for f in sizes)
+ else:
+ return True
+
+
+class ControlFlowGraph(_bytecode.BaseBytecode):
+ def __init__(self) -> None:
+ super().__init__()
+ self._blocks: List[BasicBlock] = []
+ self._block_index: Dict[int, int] = {}
+ self.argnames: List[str] = []
+
+ self.add_block()
+
+ def legalize(self) -> None:
+ """Legalize all blocks."""
+ current_lineno = self.first_lineno
+ for block in self._blocks:
+ current_lineno = block.legalize(current_lineno)
+
+ def get_block_index(self, block: BasicBlock) -> int:
+ try:
+ return self._block_index[id(block)]
+ except KeyError:
+ raise ValueError("the block is not part of this bytecode")
+
+ def _add_block(self, block: BasicBlock) -> None:
+ block_index = len(self._blocks)
+ self._blocks.append(block)
+ self._block_index[id(block)] = block_index
+
+ def add_block(
+ self, instructions: Optional[Iterable[Union[Instr, SetLineno]]] = None
+ ) -> BasicBlock:
+ block = BasicBlock(instructions)
+ self._add_block(block)
+ return block
+
+ def compute_stacksize(
+ self,
+ *,
+ check_pre_and_post: bool = True,
+ compute_exception_stack_depths: bool = True,
+ ) -> int:
+ """Compute the stack size by iterating through the blocks
+
+        The implementation makes use of generator functions to avoid issues with
+        deeply nested recursion.
+
+ """
+ # In the absence of any block return 0
+ if not self:
+ return 0
+
+ # Create the common storage for the calculation
+ common = _StackSizeComputationStorage(
+ check_pre_and_post,
+ seen_blocks=set(),
+ blocks_startsizes={id(b): set() for b in self},
+ exception_block_startsize=dict.fromkeys([id(b) for b in self], 32768),
+ exception_block_maxsize=dict.fromkeys([id(b) for b in self], -32768),
+ try_begins=[],
+ )
+
+        # Starting with Python 3.10, generators and coroutines start with one
+        # object on the stack (None; anything else is an error).
+ initial_stack_size = 0
+ if sys.version_info >= (3, 10) and self.flags & (
+ CompilerFlags.GENERATOR
+ | CompilerFlags.COROUTINE
+ | CompilerFlags.ASYNC_GENERATOR
+ ):
+ initial_stack_size = 1
+
+        # Create a generator/coroutine responsible for dealing with the first block
+ coro = _StackSizeComputer(
+ common, self[0], initial_stack_size, 0, 0, None, None
+ ).run()
+
+        # Create a list of generators that have not yet been exhausted
+ coroutines: List[Generator[Union[_StackSizeComputer, int], int, None]] = []
+
+ push_coroutine = coroutines.append
+ pop_coroutine = coroutines.pop
+ args = None
+
+ try:
+ while True:
+ # Mypy does not seem to honor the fact that one must send None
+ # to a brand new generator irrespective of its send type.
+ args = coro.send(None) # type: ignore
+
+ # Consume the stored generators as long as they return a simple
+ # integer that is to be used to resume the last stored generator.
+ while isinstance(args, int):
+ coro = pop_coroutine()
+ args = coro.send(args)
+
+                # Otherwise we enter a new block, so we store the generator in
+                # use and create a new one to process the new block
+ push_coroutine(coro)
+ coro = args.run()
+
+ except IndexError:
+ # The exception occurs when all the generators have been exhausted
+ # in which case the last yielded value is the stacksize.
+ assert args is not None and isinstance(args, int)
+
+ # Exception handling block size is reported separately since we need
+ # to report only the stack usage for the smallest start size for the
+ # block
+ args = max(args, *common.exception_block_maxsize.values())
+
+ # Check if there is dead code that may contain TryBegin/TryEnd pairs.
+ # For any such pair we set a huge size (the exception table format does not
+            # mandate a maximum value). We do so to ensure that if the pair is
+            # fused with another it does not alter the computed size.
+ for block in self:
+ if not common.blocks_startsizes[id(block)]:
+ for i in block:
+ if isinstance(i, TryBegin) and i.stack_depth is UNSET:
+ i.stack_depth = 32768
+
+ # If requested update the TryBegin stack size
+ if compute_exception_stack_depths:
+ for tb in common.try_begins:
+ size = common.exception_block_startsize[id(tb.target)]
+ assert size >= 0
+ tb.stack_depth = size
+
+ return args
+
+ def __repr__(self) -> str:
+        return "<ControlFlowGraph block#=%s>" % len(self._blocks)
+
+    # Helper to obtain a flat list of instrs which does not refer to blocks
+    # anymore. Used for comparison of different CFGs.
+ def _get_instructions(
+ self,
+ ) -> List:
+ instructions: List = []
+ try_begins: Dict[TryBegin, int] = {}
+
+ for block in self:
+ for index, instr in enumerate(block):
+ if isinstance(instr, TryBegin):
+ assert isinstance(instr.target, BasicBlock)
+ try_begins.setdefault(instr, len(try_begins))
+ instructions.append(
+ (
+ "TryBegin",
+ try_begins[instr],
+ self.get_block_index(instr.target),
+ instr.push_lasti,
+ )
+ )
+ elif isinstance(instr, TryEnd):
+ instructions.append(("TryEnd", try_begins[instr.entry]))
+ elif isinstance(instr, Instr) and (
+ instr.has_jump() or instr.is_final()
+ ):
+ if instr.has_jump():
+ target_block = instr.arg
+ assert isinstance(target_block, BasicBlock)
+ # We use a concrete instr here to be able to use an integer as
+ # argument rather than a Label. This is fine for comparison
+ # purposes which is our sole goal here.
+ c_instr = ConcreteInstr(
+ instr.name,
+ self.get_block_index(target_block),
+ location=instr.location,
+ )
+ instructions.append(c_instr)
+ else:
+ instructions.append(instr)
+
+ if te := block.get_trailing_try_end(index):
+ instructions.append(("TryEnd", try_begins[te.entry]))
+ break
+ else:
+ instructions.append(instr)
+
+ return instructions
+
+ def __eq__(self, other: Any) -> bool:
+ if type(self) is not type(other):
+ return False
+
+ if self.argnames != other.argnames:
+ return False
+
+ instrs1 = self._get_instructions()
+ instrs2 = other._get_instructions()
+ if instrs1 != instrs2:
+ return False
+ # FIXME: compare block.next_block
+
+ return super().__eq__(other)
+
+ def __len__(self) -> int:
+ return len(self._blocks)
+
+ def __iter__(self) -> Iterator[BasicBlock]:
+ return iter(self._blocks)
+
+ @overload
+ def __getitem__(self, index: Union[int, BasicBlock]) -> BasicBlock:
+ ...
+
+ @overload
+ def __getitem__(self: U, index: slice) -> U:
+ ...
+
+ def __getitem__(self, index):
+ if isinstance(index, BasicBlock):
+ index = self.get_block_index(index)
+ return self._blocks[index]
+
+ def __delitem__(self, index: Union[int, BasicBlock]) -> None:
+ if isinstance(index, BasicBlock):
+ index = self.get_block_index(index)
+ block = self._blocks[index]
+ del self._blocks[index]
+ del self._block_index[id(block)]
+ for index in range(index, len(self)):
+ block = self._blocks[index]
+ self._block_index[id(block)] -= 1
+
+ def split_block(self, block: BasicBlock, index: int) -> BasicBlock:
+ if not isinstance(block, BasicBlock):
+ raise TypeError("expected block")
+ block_index = self.get_block_index(block)
+
+ if index < 0:
+ raise ValueError("index must be positive")
+
+ block = self._blocks[block_index]
+ if index == 0:
+ return block
+
+ if index > len(block):
+ raise ValueError("index out of the block")
+
+ instructions = block[index:]
+ if not instructions:
+ if block_index + 1 < len(self):
+ return self[block_index + 1]
+
+ del block[index:]
+
+ block2 = BasicBlock(instructions)
+ block.next_block = block2
+
+ for block in self[block_index + 1 :]:
+ self._block_index[id(block)] += 1
+
+ self._blocks.insert(block_index + 1, block2)
+ self._block_index[id(block2)] = block_index + 1
+
+ return block2
+
+ def get_dead_blocks(self) -> List[BasicBlock]:
+ if not self:
+ return []
+
+ seen_block_ids = set()
+ stack = [self[0]]
+ while stack:
+ block = stack.pop()
+ if id(block) in seen_block_ids:
+ continue
+ seen_block_ids.add(id(block))
+ for i in block:
+ if isinstance(i, Instr) and isinstance(i.arg, BasicBlock):
+ stack.append(i.arg)
+ elif isinstance(i, TryBegin):
+ assert isinstance(i.target, BasicBlock)
+ stack.append(i.target)
+
+ return [b for b in self if id(b) not in seen_block_ids]
+
+ @staticmethod
+ def from_bytecode(bytecode: _bytecode.Bytecode) -> "ControlFlowGraph":
+ # label => instruction index
+ label_to_block_index = {}
+ jumps = []
+ try_end_locations = {}
+ for index, instr in enumerate(bytecode):
+ if isinstance(instr, Label):
+ label_to_block_index[instr] = index
+ elif isinstance(instr, Instr) and isinstance(instr.arg, Label):
+ jumps.append((index, instr.arg))
+ elif isinstance(instr, TryBegin):
+ assert isinstance(instr.target, Label)
+ jumps.append((index, instr.target))
+ elif isinstance(instr, TryEnd):
+ try_end_locations[instr.entry] = index
+
+        # Figure out at which index each block targeted by a label starts
+ block_starts = {}
+ for target_index, target_label in jumps:
+ target_index = label_to_block_index[target_label]
+ block_starts[target_index] = target_label
+
+ bytecode_blocks = ControlFlowGraph()
+ bytecode_blocks._copy_attr_from(bytecode)
+ bytecode_blocks.argnames = list(bytecode.argnames)
+
+ # copy instructions, convert labels to block labels
+ block = bytecode_blocks[0]
+ labels = {}
+ jumping_instrs: List[Instr] = []
+        # Map input TryBegin to CFG TryBegins (a TryBegin split across blocks
+        # may yield multiple TryBegins from a single one in the bytecode).
+ try_begins: Dict[TryBegin, list[TryBegin]] = {}
+ # Storage for TryEnds that need to be inserted at the beginning of a block.
+ # We use a list because the same block can be reached through several paths
+ # with different active TryBegins
+ add_try_end: Dict[Label, List[TryEnd]] = defaultdict(list)
+
+ # Track the currently active try begin
+ active_try_begin: Optional[TryBegin] = None
+ try_begin_inserted_in_block = False
+ last_instr: Optional[Instr] = None
+ for index, instr in enumerate(bytecode):
+ # Reference to the current block if we create a new one in the following.
+ old_block: BasicBlock | None = None
+
+ # First we determine if we need to create a new block:
+ # - by checking the current instruction index
+ if index in block_starts:
+ old_label = block_starts[index]
+ # Create a new block if the last created one is not empty
+ # (of real instructions)
+ if index != 0 and (li := block.get_last_non_artificial_instruction()):
+ old_block = block
+ new_block = bytecode_blocks.add_block()
+ # If the last non artificial instruction is not final connect
+ # this block to the next.
+ if not li.is_final():
+ block.next_block = new_block
+ block = new_block
+ if old_label is not None:
+ labels[old_label] = block
+
+ # - by inspecting the last instr
+ elif block.get_last_non_artificial_instruction() and last_instr is not None:
+ # The last instruction is final but we did not create a block
+ # -> sounds like a block of dead code but we preserve it
+ if last_instr.is_final():
+ old_block = block
+ block = bytecode_blocks.add_block()
+
+ # We are dealing with a conditional jump
+ elif last_instr.has_jump():
+ assert isinstance(last_instr.arg, Label)
+ old_block = block
+ new_block = bytecode_blocks.add_block()
+ block.next_block = new_block
+ block = new_block
+
+ # If we created a new block, we check:
+ # - if the current instruction is a TryEnd and if the last instruction
+ # is final in which case we insert the TryEnd in the old block.
+ # - if we have a currently active TryBegin for which we may need to
+ # create a TryEnd in the previous block and a new TryBegin in the
+ # new one because the blocks are not connected.
+ if old_block is not None:
+ temp = try_begin_inserted_in_block
+ try_begin_inserted_in_block = False
+
+ if old_block is not None and last_instr is not None:
+ # The last instruction is final, if the current instruction is a
+ # TryEnd insert it in the same block and move to the next instruction
+ if last_instr.is_final() and isinstance(instr, TryEnd):
+ assert active_try_begin
+ nte = instr.copy()
+ nte.entry = try_begins[active_try_begin][-1]
+ old_block.append(nte)
+ active_try_begin = None
+ continue
+
+ # If we have an active TryBegin and last_instr is:
+ elif active_try_begin is not None:
+ # - a jump whose target is beyond the TryEnd of the active
+ # TryBegin: we remember TryEnd should be prepended to the
+ # target block.
+ if (
+ last_instr.has_jump()
+ and active_try_begin in try_end_locations
+ and (
+ # last_instr is a jump so arg is a Label
+ label_to_block_index[last_instr.arg] # type: ignore
+ >= try_end_locations[active_try_begin]
+ )
+ ):
+ assert isinstance(last_instr.arg, Label)
+ add_try_end[last_instr.arg].append(
+ TryEnd(try_begins[active_try_begin][-1])
+ )
+
+ # - final and the try begin originate from the current block:
+ # we insert a TryEnd in the old block and a new TryBegin in
+ # the new one since the blocks are disconnected.
+ if last_instr.is_final() and temp:
+ old_block.append(TryEnd(try_begins[active_try_begin][-1]))
+ new_tb = TryBegin(
+ active_try_begin.target, active_try_begin.push_lasti
+ )
+ block.append(new_tb)
+ # Add this new TryBegin to the map to properly update
+ # the target.
+ try_begins[active_try_begin].append(new_tb)
+ try_begin_inserted_in_block = True
+
+ last_instr = None
+
+ if isinstance(instr, Label):
+ continue
+
+ # don't copy SetLineno objects
+ if isinstance(instr, (Instr, TryBegin, TryEnd)):
+ new = instr.copy()
+ if isinstance(instr, TryBegin):
+ assert active_try_begin is None
+ active_try_begin = instr
+ try_begin_inserted_in_block = True
+ assert isinstance(new, TryBegin)
+ try_begins[instr] = [new]
+ elif isinstance(instr, TryEnd):
+ assert isinstance(new, TryEnd)
+ new.entry = try_begins[instr.entry][-1]
+ active_try_begin = None
+ try_begin_inserted_in_block = False
+ else:
+ last_instr = instr
+ if isinstance(instr.arg, Label):
+ assert isinstance(new, Instr)
+ jumping_instrs.append(new)
+
+ instr = new
+
+ block.append(instr)
+
+    # Insert the necessary TryEnds at the beginning of blocks that were marked
+    # (if we did not already insert an equivalent TryEnd earlier).
+ for lab, tes in add_try_end.items():
+ block = labels[lab]
+ existing_te_entries = set()
+ index = 0
+            # We use a while loop because the block cannot be iterated over yet:
+            # jumps still use labels instead of blocks.
+ while index < len(block):
+ i = block[index]
+ index += 1
+ if isinstance(i, TryEnd):
+ existing_te_entries.add(i.entry)
+ else:
+ break
+ for te in tes:
+ if te.entry not in existing_te_entries:
+ labels[lab].insert(0, te)
+ existing_te_entries.add(te.entry)
+
+ # Replace labels by block in jumping instructions
+ for instr in jumping_instrs:
+ label = instr.arg
+ assert isinstance(label, Label)
+ instr.arg = labels[label]
+
+ # Replace labels by block in TryBegin
+ for b_tb, c_tbs in try_begins.items():
+ label = b_tb.target
+ assert isinstance(label, Label)
+ for c_tb in c_tbs:
+ c_tb.target = labels[label]
+
+ return bytecode_blocks
+
+ def to_bytecode(self) -> _bytecode.Bytecode:
+ """Convert to Bytecode."""
+
+ used_blocks = set()
+ for block in self:
+ target_block = block.get_jump()
+ if target_block is not None:
+ used_blocks.add(id(target_block))
+
+ for tb in (i for i in block if isinstance(i, TryBegin)):
+ used_blocks.add(id(tb.target))
+
+ labels = {}
+ jumps = []
+ try_begins = {}
+ seen_try_end: Set[TryBegin] = set()
+ instructions: List[Union[Instr, Label, TryBegin, TryEnd, SetLineno]] = []
+
+ # Track the last seen TryBegin and TryEnd to be able to fuse adjacent
+ # TryEnd/TryBegin pair which share the same target.
+ # In each case, we store the value found in the CFG and the value
+ # inserted in the bytecode.
+ last_try_begin: tuple[TryBegin, TryBegin] | None = None
+ last_try_end: tuple[TryEnd, TryEnd] | None = None
+
+ for block in self:
+ if id(block) in used_blocks:
+ new_label = Label()
+ labels[id(block)] = new_label
+ instructions.append(new_label)
+
+ for instr in block:
+ # don't copy SetLineno objects
+ if isinstance(instr, (Instr, TryBegin, TryEnd)):
+ new = instr.copy()
+ if isinstance(instr, TryBegin):
+                    # If, due to jumps and split TryBegins, we encounter a TryBegin
+                    # while another TryBegin is still active, ensure they can be fused.
+ if last_try_begin is not None:
+ cfg_tb, byt_tb = last_try_begin
+ assert instr.target is cfg_tb.target
+ assert instr.push_lasti == cfg_tb.push_lasti
+ byt_tb.stack_depth = min(
+ byt_tb.stack_depth, instr.stack_depth
+ )
+
+                    # If the TryBegin shares the target and push_lasti of the
+ # entry of an adjacent TryEnd, omit the new TryBegin that
+ # was inserted to allow analysis of the CFG and remove
+ # the already inserted TryEnd.
+ if last_try_end is not None:
+ cfg_te, byt_te = last_try_end
+ entry = cfg_te.entry
+ if (
+ entry.target is instr.target
+ and entry.push_lasti == instr.push_lasti
+ ):
+ # If we did not yet compute the required stack depth
+ # keep the value as UNSET
+ if entry.stack_depth is UNSET:
+ assert instr.stack_depth is UNSET
+ byt_te.entry.stack_depth = UNSET
+ else:
+ byt_te.entry.stack_depth = min(
+ entry.stack_depth, instr.stack_depth
+ )
+ try_begins[instr] = byt_te.entry
+ instructions.remove(byt_te)
+ continue
+ assert isinstance(new, TryBegin)
+ try_begins[instr] = new
+ last_try_begin = (instr, new)
+ last_try_end = None
+ elif isinstance(instr, TryEnd):
+ # Only keep the first seen TryEnd matching a TryBegin
+ assert isinstance(new, TryEnd)
+ if instr.entry in seen_try_end:
+ continue
+ seen_try_end.add(instr.entry)
+ new.entry = try_begins[instr.entry]
+ last_try_begin = None
+ last_try_end = (instr, new)
+ elif isinstance(instr.arg, BasicBlock):
+ assert isinstance(new, Instr)
+ jumps.append(new)
+ last_try_end = None
+ else:
+ last_try_end = None
+
+ instr = new
+
+ instructions.append(instr)
+
+ # Map to new labels
+ for instr in jumps:
+ instr.arg = labels[id(instr.arg)]
+
+ for tb in set(try_begins.values()):
+ tb.target = labels[id(tb.target)]
+
+ bytecode = _bytecode.Bytecode()
+ bytecode._copy_attr_from(self)
+ bytecode.argnames = list(self.argnames)
+ bytecode[:] = instructions
+
+ return bytecode
+
+ def to_code(
+ self,
+ stacksize: Optional[int] = None,
+ *,
+ check_pre_and_post: bool = True,
+ compute_exception_stack_depths: bool = True,
+ ) -> types.CodeType:
+ """Convert to code."""
+ if stacksize is None:
+ stacksize = self.compute_stacksize(
+ check_pre_and_post=check_pre_and_post,
+ compute_exception_stack_depths=compute_exception_stack_depths,
+ )
+ bc = self.to_bytecode()
+ return bc.to_code(
+ stacksize=stacksize,
+ check_pre_and_post=False,
+ compute_exception_stack_depths=False,
+ )
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode/concrete.py b/lambdas/aws-dd-forwarder-3.127.0/bytecode/concrete.py
new file mode 100644
index 0000000..4908e1c
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode/concrete.py
@@ -0,0 +1,1419 @@
+import dis
+import inspect
+import opcode as _opcode
+import struct
+import sys
+import types
+from typing import (
+ Any,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ MutableSequence,
+ Optional,
+ Sequence,
+ Set,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+)
+
+# alias to keep the 'bytecode' variable free
+import bytecode as _bytecode
+from bytecode.flags import CompilerFlags
+from bytecode.instr import (
+ _UNSET,
+ BITFLAG2_INSTRUCTIONS,
+ BITFLAG_INSTRUCTIONS,
+ INTRINSIC,
+ INTRINSIC_1OP,
+ INTRINSIC_2OP,
+ PLACEHOLDER_LABEL,
+ UNSET,
+ BaseInstr,
+ CellVar,
+ Compare,
+ FreeVar,
+ Instr,
+ InstrArg,
+ InstrLocation,
+ Intrinsic1Op,
+ Intrinsic2Op,
+ Label,
+ SetLineno,
+ TryBegin,
+ TryEnd,
+ _check_arg_int,
+ const_key,
+ opcode_has_argument,
+)
+
+# - jumps use instruction offsets (rather than byte offsets)
+# - lineno use bytes (dis.findlinestarts(code))
+# - dis displays bytes
+OFFSET_AS_INSTRUCTION = sys.version_info >= (3, 10)
+
+
+def _set_docstring(code: _bytecode.BaseBytecode, consts: Sequence) -> None:
+ if not consts:
+ return
+ first_const = consts[0]
+ if isinstance(first_const, str) or first_const is None:
+ code.docstring = first_const
+
+
+T = TypeVar("T", bound="ConcreteInstr")
+
+
+class ConcreteInstr(BaseInstr[int]):
+ """Concrete instruction.
+
+ arg must be an integer in the range 0..2147483647.
+
+ It has a read-only size attribute.
+
+ """
+
+ # For ConcreteInstr the argument is always an integer
+ _arg: int
+
+ __slots__ = ("_size", "_extended_args")
+
+ def __init__(
+ self,
+ name: str,
+ arg: int = UNSET,
+ *,
+ lineno: Union[int, None, _UNSET] = UNSET,
+ location: Optional[InstrLocation] = None,
+ extended_args: Optional[int] = None,
+ ):
+        # Remember a potentially meaningless EXTENDED_ARG emitted by Python
+        # so that we can properly compute the size and avoid messing up the
+        # jump targets
+ self._extended_args = extended_args
+ super().__init__(name, arg, lineno=lineno, location=location)
+
+ def _check_arg(self, name: str, opcode: int, arg: int) -> None:
+ if opcode_has_argument(opcode):
+ if arg is UNSET:
+ raise ValueError("operation %s requires an argument" % name)
+
+ _check_arg_int(arg, name)
+ # opcode == 0 corresponds to CACHE instruction in 3.11+ and was unused before
+ elif opcode == 0:
+ arg = arg if arg is not UNSET else 0
+ _check_arg_int(arg, name)
+ else:
+ if arg is not UNSET:
+ raise ValueError("operation %s has no argument" % name)
+
+ def _set(
+ self,
+ name: str,
+ arg: int,
+ ) -> None:
+ super()._set(name, arg)
+ size = 2
+ if arg is not UNSET:
+ while arg > 0xFF:
+ size += 2
+ arg >>= 8
+ if self._extended_args is not None:
+ size = 2 + 2 * self._extended_args
+ self._size = size
+
+ @property
+ def size(self) -> int:
+ return self._size
+
+ def _cmp_key(self) -> Tuple[Optional[InstrLocation], str, int]:
+ return (self._location, self._name, self._arg)
+
+ def get_jump_target(self, instr_offset: int) -> Optional[int]:
+ # When a jump arg is zero the jump always points to the first non-CACHE
+ # opcode following the jump. The passed in offset is the offset at
+ # which the jump opcode starts. So to compute the target, we add to it
+ # the instruction size (accounting for extended args) and the
+ # number of caches expected to follow the jump instruction.
+ s = (
+ (self._size // 2) if OFFSET_AS_INSTRUCTION else self._size
+ ) + self.use_cache_opcodes()
+ if self.is_forward_rel_jump():
+ return instr_offset + s + self._arg
+ if self.is_backward_rel_jump():
+ return instr_offset + s - self._arg
+ if self.is_abs_jump():
+ return self._arg
+ return None
+
+ def assemble(self) -> bytes:
+ if self._arg is UNSET:
+ return bytes((self._opcode, 0))
+
+ arg = self._arg
+ b = [self._opcode, arg & 0xFF]
+ while arg > 0xFF:
+ arg >>= 8
+ b[:0] = [_opcode.EXTENDED_ARG, arg & 0xFF]
+
+ if self._extended_args:
+ while len(b) < self._size:
+ b[:0] = [_opcode.EXTENDED_ARG, 0x00]
+
+ return bytes(b)
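+
+    # Worked example (illustrative note, not part of the upstream source):
+    # assembling an instruction with arg 0x12345 yields EXTENDED_ARG 0x01,
+    # EXTENDED_ARG 0x23, then the opcode byte with arg 0x45: six bytes in
+    # total, matching the size computed in _set().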
+
+ @classmethod
+ def disassemble(cls: Type[T], lineno: Optional[int], code: bytes, offset: int) -> T:
+ index = 2 * offset if OFFSET_AS_INSTRUCTION else offset
+ op = code[index]
+ if opcode_has_argument(op):
+ arg = code[index + 1]
+ else:
+ arg = UNSET
+ name = _opcode.opname[op]
+ return cls(name, arg, lineno=lineno)
+
+ def use_cache_opcodes(self) -> int:
+ return (
+ # Not supposed to be used but we need it
+ dis._inline_cache_entries[self._opcode] # type: ignore
+ if sys.version_info >= (3, 11)
+ else 0
+ )
+
+
+class ExceptionTableEntry:
+ """Entry for a given line in the exception table.
+
+    All offsets are expressed in instructions, not in bytes.
+
+ """
+
+    #: Offset in instructions between the beginning of the bytecode and the
+    #: beginning of this entry.
+ start_offset: int
+
+    #: Offset in instructions between the beginning of the bytecode and the end
+    #: of this entry. This offset is inclusive, meaning that the instruction it
+    #: points to is included in the try/except handling.
+ stop_offset: int
+
+    #: Offset in instructions to the first instruction of the exception handling block.
+ target: int
+
+ #: Minimal stack depth in the block delineated by start and stop
+ #: offset of the exception table entry. Used to restore the stack (by
+ #: popping items) when entering the exception handling block.
+ stack_depth: int
+
+    #: Whether the offset at which an exception was raised should be pushed on
+    #: the stack before the exception itself (which is pushed as a single value).
+ push_lasti: bool
+
+ __slots__ = ("start_offset", "stop_offset", "target", "stack_depth", "push_lasti")
+
+ def __init__(
+ self,
+ start_offset: int,
+ stop_offset: int,
+ target: int,
+ stack_depth: int,
+ push_lasti: bool,
+ ) -> None:
+ self.start_offset = start_offset
+ self.stop_offset = stop_offset
+ self.target = target
+ self.stack_depth = stack_depth
+ self.push_lasti = push_lasti
+
+ def __repr__(self) -> str:
+ return (
+ "ExceptionTableEntry("
+ f"start_offset={self.start_offset}, "
+ f"stop_offset={self.stop_offset}, "
+ f"target={self.target}, "
+ f"stack_depth={self.stack_depth}, "
+ f"push_lasti={self.push_lasti}"
+ )
+
+
+class ConcreteBytecode(_bytecode._BaseBytecodeList[Union[ConcreteInstr, SetLineno]]):
+ #: List of "constant" objects for the bytecode
+ consts: List
+
+ #: List of names used by local variables.
+ names: List[str]
+
+ #: List of names used by input variables.
+ varnames: List[str]
+
+    #: Table describing the portions of the bytecode in which exceptions are
+    #: caught and where they are handled.
+ #: Used only in Python 3.11+
+ exception_table: List[ExceptionTableEntry]
+
+ def __init__(
+ self,
+ instructions=(),
+ *,
+ consts: tuple = (),
+ names: Tuple[str, ...] = (),
+ varnames: Iterable[str] = (),
+ exception_table: Optional[List[ExceptionTableEntry]] = None,
+ ):
+ super().__init__()
+ self.consts = list(consts)
+ self.names = list(names)
+ self.varnames = list(varnames)
+ self.exception_table = exception_table or []
+ for instr in instructions:
+ self._check_instr(instr)
+ self.extend(instructions)
+
+ def __iter__(self) -> Iterator[Union[ConcreteInstr, SetLineno]]:
+ instructions = super().__iter__()
+ for instr in instructions:
+ self._check_instr(instr)
+ yield instr
+
+ def _check_instr(self, instr: Any) -> None:
+ if not isinstance(instr, (ConcreteInstr, SetLineno)):
+ raise ValueError(
+ "ConcreteBytecode must only contain "
+ "ConcreteInstr and SetLineno objects, "
+ "but %s was found" % type(instr).__name__
+ )
+
+ def _copy_attr_from(self, bytecode):
+ super()._copy_attr_from(bytecode)
+ if isinstance(bytecode, ConcreteBytecode):
+ self.consts = bytecode.consts
+ self.names = bytecode.names
+ self.varnames = bytecode.varnames
+
+ def __repr__(self) -> str:
+ return "" % len(self)
+
+ def __eq__(self, other: Any) -> bool:
+ if type(self) is not type(other):
+ return False
+
+ const_keys1 = list(map(const_key, self.consts))
+ const_keys2 = list(map(const_key, other.consts))
+ if const_keys1 != const_keys2:
+ return False
+
+ if self.names != other.names:
+ return False
+ if self.varnames != other.varnames:
+ return False
+
+ return super().__eq__(other)
+
+ @staticmethod
+ def from_code(
+ code: types.CodeType, *, extended_arg: bool = False
+ ) -> "ConcreteBytecode":
+ instructions: MutableSequence[Union[SetLineno, ConcreteInstr]]
+ # For Python 3.11+ we use dis to extract the detailed location information at
+ # reduced maintenance cost.
+ if sys.version_info >= (3, 11):
+ instructions = [
+                # dis.get_instructions automatically handles extended args,
+                # which we do not want, so we fold arguments back into the 0-255 range
+ ConcreteInstr(
+ i.opname,
+ i.arg % 256 if i.arg is not None else UNSET,
+ location=InstrLocation.from_positions(i.positions)
+ if i.positions
+ else None,
+ )
+ for i in dis.get_instructions(code, show_caches=True)
+ ]
+ else:
+ if sys.version_info >= (3, 10):
+ line_starts = dict(
+ (offset, lineno) for offset, _, lineno in code.co_lines()
+ )
+ else:
+ line_starts = dict(dis.findlinestarts(code))
+
+ # find block starts
+ instructions = []
+ offset = 0
+ lineno: Optional[int] = code.co_firstlineno
+ while offset < (len(code.co_code) // (2 if OFFSET_AS_INSTRUCTION else 1)):
+ lineno_off = (2 * offset) if OFFSET_AS_INSTRUCTION else offset
+ if lineno_off in line_starts:
+ lineno = line_starts[lineno_off]
+
+ instr = ConcreteInstr.disassemble(lineno, code.co_code, offset)
+
+ instructions.append(instr)
+ offset += (instr.size // 2) if OFFSET_AS_INSTRUCTION else instr.size
+
+ bytecode = ConcreteBytecode()
+
+        # HINT: in some cases Python generates a useless EXTENDED_ARG opcode
+        # with a value of zero. Such opcodes do not increase the size of the
+        # following opcode the way a normal EXTENDED_ARG does. As a
+        # consequence, they need to be tracked manually, as otherwise the
+        # offsets of jump targets can end up being wrong.
+ if not extended_arg:
+ # The list is modified in place
+ bytecode._remove_extended_args(instructions)
+
+ bytecode.name = code.co_name
+ bytecode.filename = code.co_filename
+ bytecode.flags = CompilerFlags(code.co_flags)
+ bytecode.argcount = code.co_argcount
+ bytecode.posonlyargcount = code.co_posonlyargcount
+ bytecode.kwonlyargcount = code.co_kwonlyargcount
+ bytecode.first_lineno = code.co_firstlineno
+ bytecode.names = list(code.co_names)
+ bytecode.consts = list(code.co_consts)
+ bytecode.varnames = list(code.co_varnames)
+ bytecode.freevars = list(code.co_freevars)
+ bytecode.cellvars = list(code.co_cellvars)
+ _set_docstring(bytecode, code.co_consts)
+ if sys.version_info >= (3, 11):
+ bytecode.exception_table = bytecode._parse_exception_table(
+ code.co_exceptiontable
+ )
+ bytecode.qualname = code.co_qualname
+        else:
+            # co_qualname does not exist before 3.11; keep the existing value
+            bytecode.qualname = bytecode.qualname
+
+ bytecode[:] = instructions
+ return bytecode
+
+ @staticmethod
+ def _normalize_lineno(
+ instructions: Sequence[Union[ConcreteInstr, SetLineno]], first_lineno: int
+ ) -> Iterator[Tuple[int, ConcreteInstr]]:
+ lineno = first_lineno
+ # For each instruction compute an "inherited" lineno used:
+ # - on 3.8 and 3.9 for which a lineno is mandatory
+ # - to infer a lineno on 3.10+ if no lineno was provided
+ for instr in instructions:
+ i_lineno = instr.lineno
+ # if instr.lineno is not set, it's inherited from the previous
+ # instruction, or from self.first_lineno
+ if i_lineno is not None and i_lineno is not UNSET:
+ lineno = i_lineno
+
+ if isinstance(instr, ConcreteInstr):
+ yield (lineno, instr)
+
+ def _assemble_code(
+ self,
+ ) -> Tuple[bytes, List[Tuple[int, int, int, Optional[InstrLocation]]]]:
+ offset = 0
+ code_str = []
+ linenos = []
+ for lineno, instr in self._normalize_lineno(self, self.first_lineno):
+ code_str.append(instr.assemble())
+ i_size = instr.size
+ linenos.append(
+ (
+ (offset * 2) if OFFSET_AS_INSTRUCTION else offset,
+ i_size,
+ lineno,
+ instr.location,
+ )
+ )
+ offset += (i_size // 2) if OFFSET_AS_INSTRUCTION else i_size
+
+ return (b"".join(code_str), linenos)
+
+ # Used on 3.8 and 3.9
+ @staticmethod
+ def _assemble_lnotab(
+ first_lineno: int, linenos: List[Tuple[int, int, int, Optional[InstrLocation]]]
+ ) -> bytes:
+ lnotab = []
+ old_offset = 0
+ old_lineno = first_lineno
+ for offset, _, lineno, _ in linenos:
+ dlineno = lineno - old_lineno
+ if dlineno == 0:
+ continue
+ old_lineno = lineno
+
+ doff = offset - old_offset
+ old_offset = offset
+
+ while doff > 255:
+ lnotab.append(b"\xff\x00")
+ doff -= 255
+
+ while dlineno < -128:
+ lnotab.append(struct.pack("Bb", doff, -128))
+ doff = 0
+ dlineno -= -128
+
+ while dlineno > 127:
+ lnotab.append(struct.pack("Bb", doff, 127))
+ doff = 0
+ dlineno -= 127
+
+ assert 0 <= doff <= 255
+ assert -128 <= dlineno <= 127
+
+ lnotab.append(struct.pack("Bb", doff, dlineno))
+
+ return b"".join(lnotab)
+
+ @staticmethod
+ def _pack_linetable(
+ linetable: List[bytes], doff: int, dlineno: Optional[int]
+ ) -> None:
+ if dlineno is not None:
+            # Ensure line deltas are between -127 and +127, by emitting 127-line
+            # jumps with a 0 byte offset
+ while dlineno < -127:
+ linetable.append(struct.pack("Bb", 0, -127))
+ dlineno -= -127
+
+ while dlineno > 127:
+ linetable.append(struct.pack("Bb", 0, 127))
+ dlineno -= 127
+
+ assert -127 <= dlineno <= 127
+ else:
+ dlineno = -128
+
+        # Ensure offsets are at most 254.
+        # If an offset is larger, we first mark the line change with an offset
+        # of 254, then use as many 254 offsets with no line change as needed to
+        # bring the remaining offset below 254.
+ if doff > 254:
+ linetable.append(struct.pack("Bb", 254, dlineno))
+ doff -= 254
+
+ while doff > 254:
+ linetable.append(b"\xfe\x00")
+ doff -= 254
+ linetable.append(struct.pack("Bb", doff, 0))
+
+ else:
+ linetable.append(struct.pack("Bb", doff, dlineno))
+
+ assert 0 <= doff <= 254
+
+ # Used on 3.10
+ def _assemble_linestable(
+ self,
+ first_lineno: int,
+ linenos: Iterable[Tuple[int, int, int, Optional[InstrLocation]]],
+ ) -> bytes:
+ if not linenos:
+ return b""
+
+ linetable: List[bytes] = []
+ old_offset = 0
+
+ iter_in = iter(linenos)
+
+ offset, i_size, old_lineno, old_location = next(iter_in)
+ if old_location is not None:
+ old_dlineno = (
+ old_location.lineno - first_lineno
+ if old_location.lineno is not None
+ else None
+ )
+ else:
+ old_dlineno = old_lineno - first_lineno
+
+ for offset, i_size, lineno, location in iter_in:
+ if location is not None:
+ dlineno = (
+ location.lineno - old_lineno
+ if location.lineno is not None
+ else None
+ )
+ else:
+ dlineno = lineno - old_lineno
+
+ if dlineno == 0 or (old_dlineno is None and dlineno is None):
+ continue
+ old_lineno = lineno
+
+ doff = offset - old_offset
+ old_offset = offset
+
+ self._pack_linetable(linetable, doff, old_dlineno)
+ old_dlineno = dlineno
+
+ # Pack the line of the last instruction.
+ doff = offset + i_size - old_offset
+ self._pack_linetable(linetable, doff, old_dlineno)
+
+ return b"".join(linetable)
+
+    # The formats are described in CPython/Objects/locations.md
+ @staticmethod
+ def _encode_location_varint(varint: int) -> bytearray:
+ encoded = bytearray()
+ # We encode on 6 bits
+ while True:
+ encoded.append(varint & 0x3F)
+ varint >>= 6
+ if varint:
+ encoded[-1] |= 0x40 # bit 6 is set except on the last entry
+ else:
+ break
+ return encoded
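+
+    # For example (illustration only): _encode_location_varint(100) returns
+    # bytearray([0x64, 0x01]): the low 6 bits (36) get the continuation bit
+    # 0x40 (giving 0x64), and the remaining high bits (1) end the sequence.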
+
+ def _encode_location_svarint(self, svarint: int) -> bytearray:
+ if svarint < 0:
+ return self._encode_location_varint(((-svarint) << 1) | 1)
+ else:
+ return self._encode_location_varint(svarint << 1)
+
+ # Python 3.11+ location format encoding
+ @staticmethod
+ def _pack_location_header(code: int, size: int) -> int:
+ return (1 << 7) + (code << 3) + (size - 1 if size <= 8 else 7)
+
+ def _pack_location(
+ self, size: int, lineno: int, location: Optional[InstrLocation]
+ ) -> bytearray:
+ packed = bytearray()
+
+ l_lineno: Optional[int]
+ # The location was not set so we infer a line.
+ if location is None:
+ l_lineno, end_lineno, col_offset, end_col_offset = (
+ lineno,
+ None,
+ None,
+ None,
+ )
+ else:
+ l_lineno, end_lineno, col_offset, end_col_offset = (
+ location.lineno,
+ location.end_lineno,
+ location.col_offset,
+ location.end_col_offset,
+ )
+
+ # We have no location information so the code is 15
+ if l_lineno is None:
+ packed.append(self._pack_location_header(15, size))
+
+ # No column info, code 13
+ elif col_offset is None:
+ if end_lineno is not None and end_lineno != l_lineno:
+ raise ValueError(
+ "An instruction cannot have no column offset and span "
+ f"multiple lines (lineno: {l_lineno}, end lineno: {end_lineno}"
+ )
+ packed.extend(
+ (
+ self._pack_location_header(13, size),
+ *self._encode_location_svarint(l_lineno - lineno),
+ )
+ )
+
+ # We enforce the end_lineno to be defined
+ else:
+ assert end_lineno is not None
+ assert end_col_offset is not None
+
+ # Short forms
+ if (
+ end_lineno == l_lineno
+ and l_lineno - lineno == 0
+ and col_offset < 72
+ and (end_col_offset - col_offset) <= 15
+ ):
+ packed.extend(
+ (
+ self._pack_location_header(col_offset // 8, size),
+ ((col_offset % 8) << 4) + (end_col_offset - col_offset),
+ )
+ )
+
+ # One line form
+ elif (
+ end_lineno == l_lineno
+ and l_lineno - lineno in (1, 2)
+ and col_offset < 256
+ and end_col_offset < 256
+ ):
+ packed.extend(
+ (
+ self._pack_location_header(10 + l_lineno - lineno, size),
+ col_offset,
+ end_col_offset,
+ )
+ )
+
+ # Long form
+ else:
+ packed.extend(
+ (
+ self._pack_location_header(14, size),
+ *self._encode_location_svarint(l_lineno - lineno),
+ *self._encode_location_varint(end_lineno - l_lineno),
+ # When decoding in codeobject.c::advance_with_locations
+ # we remove 1 from the offset ...
+ *self._encode_location_varint(col_offset + 1),
+ *self._encode_location_varint(end_col_offset + 1),
+ )
+ )
+
+ return packed
+
+ def _push_locations(
+ self,
+ locations: List[bytearray],
+ size: int,
+ lineno: int,
+ location: InstrLocation,
+ ) -> int:
+        # We need the size in instructions, not in bytes
+ size //= 2
+
+        # Repeatedly add entries since a single entry cannot cover more than
+        # 8 code units. We recompute each time since in practice we will
+        # rarely loop.
+ while True:
+ locations.append(self._pack_location(size, lineno, location))
+            # Update the lineno since, if we need more than one entry, the
+            # reference point for the lineno delta changes
+ lineno = location.lineno if location.lineno is not None else lineno
+ size -= 8
+ if size < 1:
+ break
+
+ return lineno
+
+ def _assemble_locations(
+ self,
+ first_lineno: int,
+ linenos: Iterable[Tuple[int, int, int, Optional[InstrLocation]]],
+ ) -> bytes:
+ if not linenos:
+ return b""
+
+ locations: List[bytearray] = []
+
+ iter_in = iter(linenos)
+
+ _, size, lineno, old_location = next(iter_in)
+ # Infer the line if location is None
+ old_location = old_location or InstrLocation(lineno, None, None, None)
+ lineno = first_lineno
+
+ # We track the last set lineno to be able to compute deltas
+ for _, i_size, new_lineno, location in iter_in:
+ # Infer the line if location is None
+ location = location or InstrLocation(new_lineno, None, None, None)
+
+            # Group together instructions with equivalent locations
+ if old_location.lineno and old_location == location:
+ size += i_size
+ continue
+
+ lineno = self._push_locations(locations, size, lineno, old_location)
+
+ size = i_size
+ old_location = location
+
+ # Pack the line of the last instruction.
+ self._push_locations(locations, size, lineno, old_location)
+
+ return b"".join(locations)
+
+ @staticmethod
+ def _remove_extended_args(
+ instructions: MutableSequence[Union[SetLineno, ConcreteInstr]]
+ ) -> None:
+        # HINT: in some cases Python generates a useless EXTENDED_ARG opcode
+        # with a value of zero. Such opcodes do not increase the size of the
+        # following opcode the way a normal EXTENDED_ARG does. As a
+        # consequence, they need to be tracked manually, as otherwise the
+        # offsets of jump targets can end up being wrong.
+ nb_extended_args = 0
+ extended_arg = None
+ index = 0
+ while index < len(instructions):
+ instr = instructions[index]
+
+ # Skip SetLineno meta instruction
+ if isinstance(instr, SetLineno):
+ index += 1
+ continue
+
+ if instr.name == "EXTENDED_ARG":
+ nb_extended_args += 1
+ if extended_arg is not None:
+ extended_arg = (extended_arg << 8) + instr.arg
+ else:
+ extended_arg = instr.arg
+
+ del instructions[index]
+ continue
+
+ if extended_arg is not None:
+ arg = UNSET if instr.name == "NOP" else (extended_arg << 8) + instr.arg
+ extended_arg = None
+
+ instr = ConcreteInstr(
+ instr.name,
+ arg,
+ location=instr.location,
+ extended_args=nb_extended_args,
+ )
+ instructions[index] = instr
+ nb_extended_args = 0
+
+ index += 1
+
+ if extended_arg is not None:
+ raise ValueError("EXTENDED_ARG at the end of the code")
+
+ # Taken and adapted from exception_handling_notes.txt in cpython/Objects
+ @staticmethod
+ def _parse_varint(except_table_iterator: Iterator[int]) -> int:
+ b = next(except_table_iterator)
+ val = b & 63
+ while b & 64:
+ val <<= 6
+ b = next(except_table_iterator)
+ val |= b & 63
+ return val
+
+ def _parse_exception_table(
+ self, exception_table: bytes
+ ) -> List[ExceptionTableEntry]:
+ assert sys.version_info >= (3, 11)
+ table = []
+ iterator = iter(exception_table)
+ try:
+ while True:
+ start = self._parse_varint(iterator)
+ length = self._parse_varint(iterator)
+ end = start + length - 1 # Present as inclusive
+ target = self._parse_varint(iterator)
+ dl = self._parse_varint(iterator)
+ depth = dl >> 1
+ lasti = bool(dl & 1)
+ table.append(ExceptionTableEntry(start, end, target, depth, lasti))
+ except StopIteration:
+ return table
+
+ @staticmethod
+ def _encode_varint(value: int, set_begin_marker: bool = False) -> Iterator[int]:
+        # Encode value as a varint with 6 data bits per byte (MSB comes first)
+        # and set the begin marker if requested.
+ temp: List[int] = []
+ assert value >= 0
+ while value:
+ temp.append(value & 63 | (64 if temp else 0))
+ value >>= 6
+ temp = temp or [0]
+ if set_begin_marker:
+ temp[-1] |= 128
+ return reversed(temp)
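+
+    # Round-trip illustration (not part of the upstream source):
+    # _encode_varint(300, True) yields bytes (196, 44); _parse_varint ignores
+    # the begin-marker bit (128), keeps the continuation bit (64) and 6 data
+    # bits per byte, recovering (4 << 6) | 44 == 300.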
+
+ def _assemble_exception_table(self) -> bytes:
+ table = bytearray()
+ for entry in self.exception_table or []:
+ size = entry.stop_offset - entry.start_offset + 1
+ depth = (entry.stack_depth << 1) + entry.push_lasti
+ table.extend(self._encode_varint(entry.start_offset, True))
+ table.extend(self._encode_varint(size))
+ table.extend(self._encode_varint(entry.target))
+ table.extend(self._encode_varint(depth))
+
+ return bytes(table)
+
+ def compute_stacksize(self, *, check_pre_and_post: bool = True) -> int:
+ bytecode = self.to_bytecode()
+ cfg = _bytecode.ControlFlowGraph.from_bytecode(bytecode)
+ return cfg.compute_stacksize(check_pre_and_post=check_pre_and_post)
+
+ def to_code(
+ self,
+ stacksize: Optional[int] = None,
+ *,
+ check_pre_and_post: bool = True,
+ compute_exception_stack_depths: bool = True,
+ ) -> types.CodeType:
+        # If the stack size or the exception stack depths must be computed,
+        # convert to a CFG once and reuse it rather than reconverting the
+        # concrete bytecode multiple times.
+ if stacksize is None or (
+ sys.version_info >= (3, 11) and compute_exception_stack_depths
+ ):
+ cfg = _bytecode.ControlFlowGraph.from_bytecode(self.to_bytecode())
+ stacksize = cfg.compute_stacksize(
+ check_pre_and_post=check_pre_and_post,
+ compute_exception_stack_depths=compute_exception_stack_depths,
+ )
+ self = cfg.to_bytecode().to_concrete_bytecode(
+ compute_exception_stack_depths=False
+ )
+
+ # Assemble the code string after round tripping to CFG if necessary.
+ code_str, linenos = self._assemble_code()
+
+ lnotab = (
+ self._assemble_locations(self.first_lineno, linenos)
+ if sys.version_info >= (3, 11)
+ else (
+ self._assemble_linestable(self.first_lineno, linenos)
+ if sys.version_info >= (3, 10)
+ else self._assemble_lnotab(self.first_lineno, linenos)
+ )
+ )
+ nlocals = len(self.varnames)
+
+ if sys.version_info >= (3, 11):
+ return types.CodeType(
+ self.argcount,
+ self.posonlyargcount,
+ self.kwonlyargcount,
+ nlocals,
+ stacksize,
+ int(self.flags),
+ code_str,
+ tuple(self.consts),
+ tuple(self.names),
+ tuple(self.varnames),
+ self.filename,
+ self.name,
+ self.qualname,
+ self.first_lineno,
+ lnotab,
+ self._assemble_exception_table(),
+ tuple(self.freevars),
+ tuple(self.cellvars),
+ )
+ else:
+ return types.CodeType(
+ self.argcount,
+ self.posonlyargcount,
+ self.kwonlyargcount,
+ nlocals,
+ stacksize,
+ int(self.flags),
+ code_str,
+ tuple(self.consts),
+ tuple(self.names),
+ tuple(self.varnames),
+ self.filename,
+ self.name,
+ self.first_lineno,
+ lnotab,
+ tuple(self.freevars),
+ tuple(self.cellvars),
+ )
+
+ def to_bytecode(
+ self,
+ prune_caches: bool = True,
+ conserve_exception_block_stackdepth: bool = False,
+ ) -> _bytecode.Bytecode:
+        # On 3.11+ we generate pseudo-instructions from the exception table
+
+        # Copy instructions and remove extended args if any (in-place)
+ c_instructions = self[:]
+ self._remove_extended_args(c_instructions)
+
+ # Find jump targets
+ jump_targets: Set[int] = set()
+ offset = 0
+ for c_instr in c_instructions:
+ if isinstance(c_instr, SetLineno):
+ continue
+ target = c_instr.get_jump_target(offset)
+ if target is not None:
+ jump_targets.add(target)
+ offset += (c_instr.size // 2) if OFFSET_AS_INSTRUCTION else c_instr.size
+
+ # On 3.11+ we need to also look at the exception table for jump targets
+ for ex_entry in self.exception_table:
+ jump_targets.add(ex_entry.target)
+
+        # Create lookup dicts to find entries based on either exception handling
+        # block entry or exit offsets. Several blocks can end on the same
+        # instruction, so we store a list of entries per offset.
+ ex_start: Dict[int, ExceptionTableEntry] = {}
+ ex_end: Dict[int, List[ExceptionTableEntry]] = {}
+ for entry in self.exception_table:
+ # Ensure we do not have more than one entry with identical starting
+ # offsets
+ assert entry.start_offset not in ex_start
+ ex_start[entry.start_offset] = entry
+ ex_end.setdefault(entry.stop_offset, []).append(entry)
+
+ # Create labels and instructions
+ jumps: List[Tuple[int, int]] = []
+ instructions: List[Union[Instr, Label, TryBegin, TryEnd, SetLineno]] = []
+ labels = {}
+ tb_instrs: Dict[ExceptionTableEntry, TryBegin] = {}
+ offset = 0
+        # In Python 3.11+ cell and varnames can be shared and are indexed in a
+        # single array.
+        # As a consequence, the instruction argument can be either:
+        # - < len(varnames): the name is shared and we can directly use
+        #   the index to access the name in cellvars
+        # - >= len(varnames): the name is not shared and is offset by the
+        #   number of unshared varnames.
+        # Free vars are never shared and correspond to indexes larger than the
+        # largest cell var.
+        # See PyCode_NewWithPosOnlyArgs
+ if sys.version_info >= (3, 11):
+ cells_lookup = self.varnames + [
+ n for n in self.cellvars if n not in self.varnames
+ ]
+ ncells = len(cells_lookup)
+ else:
+ ncells = len(self.cellvars)
+ cells_lookup = self.cellvars
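+
+        # Illustration (hypothetical names): with varnames == ["x", "y"] and
+        # cellvars == ["y", "z"], cells_lookup on 3.11+ becomes ["x", "y", "z"]
+        # (ncells == 3), so a hasfree argument of 3 resolves to
+        # freevars[3 - ncells] == freevars[0].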
+
+ for lineno, c_instr in self._normalize_lineno(
+ c_instructions, self.first_lineno
+ ):
+ if offset in jump_targets:
+ label = Label()
+ labels[offset] = label
+ instructions.append(label)
+
+ # Handle TryBegin pseudo instructions
+ if offset in ex_start:
+ entry = ex_start[offset]
+ tb_instr = TryBegin(
+ Label(),
+ entry.push_lasti,
+ entry.stack_depth if conserve_exception_block_stackdepth else UNSET,
+ )
+ # Per entry store the pseudo instruction associated
+ tb_instrs[entry] = tb_instr
+ instructions.append(tb_instr)
+
+ jump_target = c_instr.get_jump_target(offset)
+ size = c_instr.size
+ # If an instruction uses extended args, those appear before the instruction
+ # causing the instruction to appear at offset that accounts for extended
+ # args. So we first update the offset to account for extended args, then
+ # record the instruction offset and then add the instruction itself to the
+ # offset.
+ offset += (size // 2 - 1) if OFFSET_AS_INSTRUCTION else (size - 2)
+ current_instr_offset = offset
+ offset += 1 if OFFSET_AS_INSTRUCTION else 2
+
+            # On Python 3.11+ remove CACHE opcodes if we are requested to do so.
+            # We are careful to first advance the offset and check that the CACHE
+            # is not a jump target. It should never be the case, but we double check.
+ if prune_caches and c_instr.name == "CACHE":
+ assert jump_target is None
+
+            # We may need to insert a TryEnd after a CACHE, so we still need to
+            # run through the last block.
+ else:
+ arg: InstrArg
+ c_arg = c_instr.arg
+ # FIXME: better error reporting
+ if c_instr.opcode in _opcode.hasconst:
+ arg = self.consts[c_arg]
+ elif c_instr.opcode in _opcode.haslocal:
+ arg = self.varnames[c_arg]
+ elif c_instr.opcode in _opcode.hasname:
+ if c_instr.name in BITFLAG_INSTRUCTIONS:
+ arg = (bool(c_arg & 1), self.names[c_arg >> 1])
+ elif c_instr.name in BITFLAG2_INSTRUCTIONS:
+ arg = (bool(c_arg & 1), bool(c_arg & 2), self.names[c_arg >> 2])
+ else:
+ arg = self.names[c_arg]
+ elif c_instr.opcode in _opcode.hasfree:
+ if c_arg < ncells:
+ name = cells_lookup[c_arg]
+ arg = CellVar(name)
+ else:
+ name = self.freevars[c_arg - ncells]
+ arg = FreeVar(name)
+ elif c_instr.opcode in _opcode.hascompare:
+ arg = Compare(
+ (c_arg >> 4) if sys.version_info >= (3, 12) else c_arg
+ )
+ elif c_instr.opcode in INTRINSIC_1OP:
+ arg = Intrinsic1Op(c_arg)
+ elif c_instr.opcode in INTRINSIC_2OP:
+ arg = Intrinsic2Op(c_arg)
+ else:
+ arg = c_arg
+
+ location = c_instr.location or InstrLocation(lineno, None, None, None)
+
+ if jump_target is not None:
+ arg = PLACEHOLDER_LABEL
+ instr_index = len(instructions)
+ jumps.append((instr_index, jump_target))
+
+ instructions.append(Instr(c_instr.name, arg, location=location))
+
+ # We now insert the TryEnd entries
+ if current_instr_offset in ex_end:
+ entries = ex_end[current_instr_offset]
+ for entry in reversed(entries):
+ instructions.append(TryEnd(tb_instrs[entry]))
+
+ # Replace jump targets with labels
+ for index, jump_target in jumps:
+ instr = instructions[index]
+ assert isinstance(instr, Instr) and instr.arg is PLACEHOLDER_LABEL
+ # FIXME: better error reporting on missing label
+ instr.arg = labels[jump_target]
+
+ # Set the label for TryBegin
+ for entry, tb in tb_instrs.items():
+ tb.target = labels[entry.target]
+
+ bytecode = _bytecode.Bytecode()
+ bytecode._copy_attr_from(self)
+
+ nargs = bytecode.argcount + bytecode.kwonlyargcount
+ nargs += bytecode.posonlyargcount
+ if bytecode.flags & inspect.CO_VARARGS:
+ nargs += 1
+ if bytecode.flags & inspect.CO_VARKEYWORDS:
+ nargs += 1
+ bytecode.argnames = self.varnames[:nargs]
+ _set_docstring(bytecode, self.consts)
+
+ bytecode.extend(instructions)
+ return bytecode
+
+
+class _ConvertBytecodeToConcrete:
+ # XXX document attributes
+
+ #: Default number of passes of compute_jumps() before giving up. Refer to
+ #: assemble_jump_offsets() in compile.c for background.
+ _compute_jumps_passes = 10
+
+ def __init__(self, code: _bytecode.Bytecode) -> None:
+ assert isinstance(code, _bytecode.Bytecode)
+ self.bytecode = code
+
+ # temporary variables
+ self.instructions: List[ConcreteInstr] = []
+ self.jumps: List[Tuple[int, Label, ConcreteInstr]] = []
+ self.labels: Dict[Label, int] = {}
+ self.exception_handling_blocks: Dict[TryBegin, ExceptionTableEntry] = {}
+ self.required_caches = 0
+ self.seen_manual_cache = False
+
+ # used to build ConcreteBytecode() object
+ self.consts_indices: Dict[Union[bytes, Tuple[type, int]], int] = {}
+ self.consts_list: List[Any] = []
+ self.names: List[str] = []
+ self.varnames: List[str] = []
+
+ def add_const(self, value: Any) -> int:
+ key = const_key(value)
+ if key in self.consts_indices:
+ return self.consts_indices[key]
+ index = len(self.consts_indices)
+ self.consts_indices[key] = index
+ self.consts_list.append(value)
+ return index
+
+ @staticmethod
+ def add(names: List[str], name: str) -> int:
+ try:
+ index = names.index(name)
+ except ValueError:
+ index = len(names)
+ names.append(name)
+ return index
+
+ def concrete_instructions(self) -> None:
+ lineno = self.bytecode.first_lineno
+ # Track instruction (index) using cell vars and free vars to be able to update
+ # the index used once all the names are known.
+ cell_instrs: List[int] = []
+ free_instrs: List[int] = []
+
+ for instr in self.bytecode:
+            # Enforce proper use of the CACHE opcode on Python 3.11+ by checking
+            # we get the number we expect, or directly generate the needed ones.
+ if isinstance(instr, Instr) and instr.name == "CACHE":
+ if not self.required_caches:
+ raise RuntimeError("Found a CACHE opcode when none was expected.")
+ self.seen_manual_cache = True
+ self.required_caches -= 1
+
+ elif self.required_caches:
+ if not self.seen_manual_cache:
+ # We preserve the location of the instruction requiring the
+ # presence of cache instructions
+ self.instructions.extend(
+ [
+ ConcreteInstr(
+ "CACHE", 0, location=self.instructions[-1].location
+ )
+ for i in range(self.required_caches)
+ ]
+ )
+ self.required_caches = 0
+ self.seen_manual_cache = False
+ else:
+ raise RuntimeError(
+ "Found some manual opcode but less than expected. "
+ f"Missing {self.required_caches} CACHE opcodes."
+ )
+
+ if isinstance(instr, Label):
+ self.labels[instr] = len(self.instructions)
+ continue
+
+ if isinstance(instr, SetLineno):
+ lineno = instr.lineno
+ continue
+
+ if isinstance(instr, TryBegin):
+                # We expect the stack depth to have been provided or computed earlier
+ assert instr.stack_depth is not UNSET
+                # NOTE: here we store the index of the instruction at which the
+                # exception table entry starts. This is not the final value we
+                # want; we want the offset in the bytecode, but that requires
+                # computing the jumps first to resolve any extended arg needed
+                # in a jump.
+ self.exception_handling_blocks[instr] = ExceptionTableEntry(
+ len(self.instructions), 0, 0, instr.stack_depth, instr.push_lasti
+ )
+ continue
+
+ # Do not handle TryEnd before we insert possible CACHE opcode
+ if isinstance(instr, TryEnd):
+ entry = self.exception_handling_blocks[instr.entry]
+                # The TryEnd is located after the last opcode in the exception
+                # entry, so we move the offset back by one. This way the end
+                # also encompasses a possible EXTENDED_ARG
+ entry.stop_offset = len(self.instructions) - 1
+ continue
+
+ assert isinstance(instr, Instr)
+
+ if instr.lineno is not UNSET and instr.lineno is not None:
+ lineno = instr.lineno
+ elif instr.lineno is UNSET:
+ instr.lineno = lineno
+
+ arg = instr.arg
+ is_jump = False
+ if isinstance(arg, Label):
+ label = arg
+ # fake value, real value is set in compute_jumps()
+ arg = 0
+ is_jump = True
+ elif instr.opcode in _opcode.hasconst:
+ arg = self.add_const(arg)
+ elif instr.opcode in _opcode.haslocal:
+ assert isinstance(arg, str)
+ arg = self.add(self.varnames, arg)
+ elif instr.opcode in _opcode.hasname:
+ if instr.name in BITFLAG_INSTRUCTIONS:
+ assert (
+ isinstance(arg, tuple)
+ and len(arg) == 2
+ and isinstance(arg[0], bool)
+ and isinstance(arg[1], str)
+ ), arg
+ index = self.add(self.names, arg[1])
+ arg = int(arg[0]) + (index << 1)
+ elif instr.name in BITFLAG2_INSTRUCTIONS:
+ assert (
+ isinstance(arg, tuple)
+ and len(arg) == 3
+ and isinstance(arg[0], bool)
+ and isinstance(arg[1], bool)
+ and isinstance(arg[2], str)
+ ), arg
+ index = self.add(self.names, arg[2])
+ arg = int(arg[0]) + 2 * int(arg[1]) + (index << 2)
+ else:
+ assert isinstance(arg, str), f"Got {arg}, expected a str"
+ arg = self.add(self.names, arg)
+ elif instr.opcode in _opcode.hasfree:
+ if isinstance(arg, CellVar):
+ cell_instrs.append(len(self.instructions))
+ arg = self.bytecode.cellvars.index(arg.name)
+ else:
+ assert isinstance(arg, FreeVar)
+ free_instrs.append(len(self.instructions))
+ arg = self.bytecode.freevars.index(arg.name)
+ elif instr.opcode in _opcode.hascompare:
+ if isinstance(arg, Compare):
+ # In Python 3.12 the 4 lowest bits are used for caching
+ # See compare_masks in compile.c
+ if sys.version_info >= (3, 12):
+ arg = arg._get_mask() + (arg.value << 4)
+ else:
+ arg = arg.value
+ elif instr.opcode in INTRINSIC:
+ if isinstance(arg, (Intrinsic1Op, Intrinsic2Op)):
+ arg = arg.value
+
+ # The above should have performed all the necessary conversion
+ assert isinstance(arg, int)
+ c_instr = ConcreteInstr(instr.name, arg, location=instr.location)
+ if is_jump:
+ self.jumps.append((len(self.instructions), label, c_instr))
+
+ # If the instruction expect some cache
+ if sys.version_info >= (3, 11):
+ self.required_caches = c_instr.use_cache_opcodes()
+ self.seen_manual_cache = False
+
+ self.instructions.append(c_instr)
+
+        # On Python 3.11+ varnames and cells can share some names. Find the
+        # shared names and update the arg of instructions using cell vars.
+        # We also track by how much to offset free vars, which are stored in a
+        # contiguous array after the cell vars
+ if sys.version_info >= (3, 11):
+ # Map naive cell index to shared index
+ shared_name_indexes: Dict[int, int] = {}
+ n_shared = 0
+ n_unshared = 0
+ for i, name in enumerate(self.bytecode.cellvars):
+ if name in self.varnames:
+ shared_name_indexes[i] = self.varnames.index(name)
+ n_shared += 1
+ else:
+ shared_name_indexes[i] = len(self.varnames) + n_unshared
+ n_unshared += 1
+
+ for index in cell_instrs:
+ c_instr = self.instructions[index]
+ c_instr.arg = shared_name_indexes[c_instr.arg]
+
+ free_offset = len(self.varnames) + len(self.bytecode.cellvars) - n_shared
+ else:
+ free_offset = len(self.bytecode.cellvars)
+
+ for index in free_instrs:
+ c_instr = self.instructions[index]
+ c_instr.arg += free_offset
+
+ def compute_jumps(self) -> bool:
+        # For labels we need the offset before the instruction at a given index,
+        # but for exception table entries we need the offset of the instruction
+        # itself, which can differ in the presence of extended args...
+ label_offsets = []
+ instruction_offsets = []
+ offset = 0
+ for index, instr in enumerate(self.instructions):
+ label_offsets.append(offset)
+ # If an instruction uses extended args, those appear before the instruction
+ # causing the instruction to appear at offset that accounts for extended
+ # args.
+ offset += (
+ (instr.size // 2 - 1) if OFFSET_AS_INSTRUCTION else (instr.size - 2)
+ )
+ instruction_offsets.append(offset)
+ offset += 1 if OFFSET_AS_INSTRUCTION else 2
+ # needed if a label is at the end
+ label_offsets.append(offset)
+
+ # FIXME may need some extra check to validate jump forward vs jump backward
+ # fix argument of jump instructions: resolve labels
+ modified = False
+ for index, label, instr in self.jumps:
+ target_index = self.labels[label]
+ target_offset = label_offsets[target_index]
+
+ # FIXME use opcode
+            # Under 3.12+, the FOR_ITER and SEND jumps are implicitly increased
+            # by 1 to skip over END_FOR/END_SEND, see Python/instrumentation.c
+ if sys.version_info >= (3, 12) and instr.name in ("FOR_ITER", "SEND"):
+ target_offset -= 1
+
+ if instr.is_forward_rel_jump():
+ instr_offset = label_offsets[index]
+ target_offset -= instr_offset + (
+ instr.size // 2 if OFFSET_AS_INSTRUCTION else instr.size
+ )
+ elif instr.is_backward_rel_jump():
+ instr_offset = label_offsets[index]
+ target_offset = (
+ instr_offset
+ + (instr.size // 2 if OFFSET_AS_INSTRUCTION else instr.size)
+ - target_offset
+ )
+
+ old_size = instr.size
+ # FIXME: better error report if target_offset is negative
+ instr.arg = target_offset
+ if instr.size != old_size:
+ modified = True
+
+        # If a jump required an extended arg, hence invalidating the calculation,
+        # we return early before filling the exception table entries
+ if modified:
+ return modified
+
+ # Resolve labels for exception handling entries
+ for tb, entry in self.exception_handling_blocks.items():
+ # Set the offset for the start and end offset from the instruction
+ # index stored when assembling the concrete instructions.
+ entry.start_offset = instruction_offsets[entry.start_offset]
+ entry.stop_offset = instruction_offsets[entry.stop_offset]
+
+ # Set the offset to the target instruction
+ lb = tb.target
+ assert isinstance(lb, Label)
+ target_index = self.labels[lb]
+ target_offset = label_offsets[target_index]
+ entry.target = target_offset
+
+ return False
+
+ def to_concrete_bytecode(
+ self,
+ compute_jumps_passes: Optional[int] = None,
+ compute_exception_stack_depths: bool = True,
+ ) -> ConcreteBytecode:
+ if sys.version_info >= (3, 11) and compute_exception_stack_depths:
+ cfg = _bytecode.ControlFlowGraph.from_bytecode(self.bytecode)
+ cfg.compute_stacksize(compute_exception_stack_depths=True)
+ self.bytecode = cfg.to_bytecode()
+
+ if compute_jumps_passes is None:
+ compute_jumps_passes = self._compute_jumps_passes
+
+ first_const = self.bytecode.docstring
+ if first_const is not UNSET:
+ self.add_const(first_const)
+
+ self.varnames.extend(self.bytecode.argnames)
+
+ self.concrete_instructions()
+ for pas in range(0, compute_jumps_passes):
+ modified = self.compute_jumps()
+ if not modified:
+ break
+ else:
+ raise RuntimeError(
+ "compute_jumps() failed to converge after" " %d passes" % (pas + 1)
+ )
+
+ concrete = ConcreteBytecode(
+ self.instructions,
+ consts=tuple(self.consts_list),
+ names=tuple(self.names),
+ varnames=self.varnames,
+ exception_table=list(self.exception_handling_blocks.values()),
+ )
+ concrete._copy_attr_from(self.bytecode)
+ return concrete
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode/flags.py b/lambdas/aws-dd-forwarder-3.127.0/bytecode/flags.py
new file mode 100644
index 0000000..039150f
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode/flags.py
@@ -0,0 +1,187 @@
+import opcode
+import sys
+from enum import IntFlag
+from typing import Optional, Union
+
+# alias to keep the 'bytecode' variable free
+import bytecode as _bytecode
+
+
+class CompilerFlags(IntFlag):
+ """Possible values of the co_flags attribute of Code object.
+
+ Note: We do not rely on inspect values here as some of them are missing and
+ furthermore would be version dependent.
+
+ """
+
+ OPTIMIZED = 0x00001 # noqa
+ NEWLOCALS = 0x00002 # noqa
+ VARARGS = 0x00004 # noqa
+ VARKEYWORDS = 0x00008 # noqa
+ NESTED = 0x00010 # noqa
+ GENERATOR = 0x00020 # noqa
+ NOFREE = 0x00040 # noqa
+ # New in Python 3.5
+    # Used for coroutines defined using async def, i.e. native coroutines
+ COROUTINE = 0x00080 # noqa
+ # Used for coroutines defined as a generator and then decorated using
+ # types.coroutine
+ ITERABLE_COROUTINE = 0x00100 # noqa
+ # New in Python 3.6
+ # Generator defined in an async def function
+ ASYNC_GENERATOR = 0x00200 # noqa
+
+ # __future__ flags
+ # future flags changed in Python 3.9
+ if sys.version_info < (3, 9):
+ FUTURE_GENERATOR_STOP = 0x80000 # noqa
+ FUTURE_ANNOTATIONS = 0x100000
+ else:
+ FUTURE_GENERATOR_STOP = 0x800000 # noqa
+ FUTURE_ANNOTATIONS = 0x1000000
+
+
+def infer_flags(
+ bytecode: Union[
+ "_bytecode.Bytecode", "_bytecode.ConcreteBytecode", "_bytecode.ControlFlowGraph"
+ ],
+ is_async: Optional[bool] = None,
+):
+ """Infer the proper flags for a bytecode based on the instructions.
+
+    Because the bytecode does not have enough context to guess if a function
+    is asynchronous, the algorithm tries to be conservative and will never turn
+    previously async code into sync code.
+
+ Parameters
+ ----------
+ bytecode : Bytecode | ConcreteBytecode | ControlFlowGraph
+ Bytecode for which to infer the proper flags
+ is_async : bool | None, optional
+ Force the code to be marked as asynchronous if True, prevent it from
+ being marked as asynchronous if False and simply infer the best
+ solution based on the opcode and the existing flag if None.
+
+ """
+ flags = CompilerFlags(0)
+ if not isinstance(
+ bytecode,
+ (_bytecode.Bytecode, _bytecode.ConcreteBytecode, _bytecode.ControlFlowGraph),
+ ):
+ msg = (
+ "Expected a Bytecode, ConcreteBytecode or ControlFlowGraph "
+ "instance not %s"
+ )
+ raise ValueError(msg % bytecode)
+
+ instructions = (
+ bytecode._get_instructions()
+ if isinstance(bytecode, _bytecode.ControlFlowGraph)
+ else bytecode
+ )
+ instr_names = {
+ i.name
+ for i in instructions
+ if not isinstance(
+ i,
+ (
+ _bytecode.SetLineno,
+ _bytecode.Label,
+ _bytecode.TryBegin,
+ _bytecode.TryEnd,
+ ),
+ )
+ }
+
+ # Identify optimized code
+ if not (instr_names & {"STORE_NAME", "LOAD_NAME", "DELETE_NAME"}):
+ flags |= CompilerFlags.OPTIMIZED
+
+ # Check for free variables
+ if not (instr_names & {opcode.opname[i] for i in opcode.hasfree}):
+ flags |= CompilerFlags.NOFREE
+
+ # Copy flags for which we cannot infer the right value
+ flags |= bytecode.flags & (
+ CompilerFlags.NEWLOCALS
+ | CompilerFlags.VARARGS
+ | CompilerFlags.VARKEYWORDS
+ | CompilerFlags.NESTED
+ )
+
+ sure_generator = instr_names & {"YIELD_VALUE"}
+ maybe_generator = instr_names & {"YIELD_VALUE", "YIELD_FROM"}
+
+ sure_async = instr_names & {
+ "GET_AWAITABLE",
+ "GET_AITER",
+ "GET_ANEXT",
+ "BEFORE_ASYNC_WITH",
+ "SETUP_ASYNC_WITH",
+ "END_ASYNC_FOR",
+ "ASYNC_GEN_WRAP", # New in 3.11
+ }
+
+ # If performing inference or forcing an async behavior, first inspect
+ # the flags since this is the only way to identify iterable coroutines
+ if is_async in (None, True):
+ if bytecode.flags & CompilerFlags.COROUTINE:
+ if sure_generator:
+ flags |= CompilerFlags.ASYNC_GENERATOR
+ else:
+ flags |= CompilerFlags.COROUTINE
+ elif bytecode.flags & CompilerFlags.ITERABLE_COROUTINE:
+ if sure_async:
+ msg = (
+ "The ITERABLE_COROUTINE flag is set but bytecode that"
+ "can only be used in async functions have been "
+ "detected. Please unset that flag before performing "
+ "inference."
+ )
+ raise ValueError(msg)
+ flags |= CompilerFlags.ITERABLE_COROUTINE
+ elif bytecode.flags & CompilerFlags.ASYNC_GENERATOR:
+ if not sure_generator:
+ flags |= CompilerFlags.COROUTINE
+ else:
+ flags |= CompilerFlags.ASYNC_GENERATOR
+
+    # If the code was not asynchronous before, determine if it should now be
+    # asynchronous based on the opcodes and the is_async argument.
+ else:
+ if sure_async:
+            # YIELD_FROM is not allowed in async generators
+ if sure_generator:
+ flags |= CompilerFlags.ASYNC_GENERATOR
+ else:
+ flags |= CompilerFlags.COROUTINE
+
+ elif maybe_generator:
+ if is_async:
+ if sure_generator:
+ flags |= CompilerFlags.ASYNC_GENERATOR
+ else:
+ flags |= CompilerFlags.COROUTINE
+ else:
+ flags |= CompilerFlags.GENERATOR
+
+ elif is_async:
+ flags |= CompilerFlags.COROUTINE
+
+    # If the code should not be asynchronous, first check that this is possible
+    # and then set the GENERATOR flag if relevant
+ else:
+ if sure_async:
+ raise ValueError(
+ "The is_async argument is False but bytecodes "
+ "that can only be used in async functions have "
+ "been detected."
+ )
+
+ if maybe_generator:
+ flags |= CompilerFlags.GENERATOR
+
+ flags |= bytecode.flags & CompilerFlags.FUTURE_GENERATOR_STOP
+
+ return flags
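+
+
+# Usage sketch (illustrative, not part of the upstream module):
+#
+#     import bytecode as _b
+#
+#     def gen():
+#         yield 1
+#
+#     bc = _b.Bytecode.from_code(gen.__code__)
+#     bc.flags = infer_flags(bc)  # expect GENERATOR, OPTIMIZED and NOFREE set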
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode/instr.py b/lambdas/aws-dd-forwarder-3.127.0/bytecode/instr.py
new file mode 100644
index 0000000..e927cdf
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode/instr.py
@@ -0,0 +1,878 @@
+import dis
+import enum
+import opcode as _opcode
+import sys
+from abc import abstractmethod
+from dataclasses import dataclass
+from marshal import dumps as _dumps
+from typing import Any, Callable, Dict, Generic, Optional, Tuple, TypeVar, Union
+
+try:
+ from typing import TypeGuard
+except ImportError:
+ from typing_extensions import TypeGuard # type: ignore
+
+import bytecode as _bytecode
+
+# --- Instruction argument tools
+
+MIN_INSTRUMENTED_OPCODE = getattr(_opcode, "MIN_INSTRUMENTED_OPCODE", 256)
+
+# Instructions relying on a bit to modify their behavior.
+# The lowest bit is used to encode custom behavior.
+BITFLAG_INSTRUCTIONS = (
+ ("LOAD_GLOBAL", "LOAD_ATTR")
+ if sys.version_info >= (3, 12)
+ else ("LOAD_GLOBAL",)
+ if sys.version_info >= (3, 11)
+ else ()
+)
+
+BITFLAG2_INSTRUCTIONS = ("LOAD_SUPER_ATTR",) if sys.version_info >= (3, 12) else ()
+
+# Intrinsic related opcodes
+INTRINSIC_1OP = (
+ (_opcode.opmap["CALL_INTRINSIC_1"],) if sys.version_info >= (3, 12) else ()
+)
+INTRINSIC_2OP = (
+ (_opcode.opmap["CALL_INTRINSIC_2"],) if sys.version_info >= (3, 12) else ()
+)
+INTRINSIC = INTRINSIC_1OP + INTRINSIC_2OP
+
+
+# Used for COMPARE_OP opcode argument
+@enum.unique
+class Compare(enum.IntEnum):
+ LT = 0
+ LE = 1
+ EQ = 2
+ NE = 3
+ GT = 4
+ GE = 5
+ if sys.version_info < (3, 9):
+ IN = 6
+ NOT_IN = 7
+ IS = 8
+ IS_NOT = 9
+ EXC_MATCH = 10
+
+ if sys.version_info >= (3, 12):
+
+ def _get_mask(self):
+ if self == Compare.EQ:
+ return 8
+ elif self == Compare.NE:
+ return 1 + 2 + 4
+ elif self == Compare.LT:
+ return 2
+ elif self == Compare.LE:
+ return 2 + 8
+ elif self == Compare.GT:
+ return 4
+ elif self == Compare.GE:
+ return 4 + 8
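+
+        # Illustration (not from the upstream source): on 3.12 COMPARE_OP's
+        # oparg is encoded as mask + (value << 4), e.g. Compare.EQ becomes
+        # 8 + (2 << 4) == 40.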
+
+
+# Used for BINARY_OP under Python 3.11+
+@enum.unique
+class BinaryOp(enum.IntEnum):
+ ADD = 0
+ AND = 1
+ FLOOR_DIVIDE = 2
+ LSHIFT = 3
+ MATRIX_MULTIPLY = 4
+ MULTIPLY = 5
+ REMAINDER = 6
+ OR = 7
+ POWER = 8
+ RSHIFT = 9
+ SUBTRACT = 10
+ TRUE_DIVIDE = 11
+ XOR = 12
+ INPLACE_ADD = 13
+ INPLACE_AND = 14
+ INPLACE_FLOOR_DIVIDE = 15
+ INPLACE_LSHIFT = 16
+ INPLACE_MATRIX_MULTIPLY = 17
+ INPLACE_MULTIPLY = 18
+ INPLACE_REMAINDER = 19
+ INPLACE_OR = 20
+ INPLACE_POWER = 21
+ INPLACE_RSHIFT = 22
+ INPLACE_SUBTRACT = 23
+ INPLACE_TRUE_DIVIDE = 24
+ INPLACE_XOR = 25
+
+
+@enum.unique
+class Intrinsic1Op(enum.IntEnum):
+ INTRINSIC_1_INVALID = 0
+ INTRINSIC_PRINT = 1
+ INTRINSIC_IMPORT_STAR = 2
+ INTRINSIC_STOPITERATION_ERROR = 3
+ INTRINSIC_ASYNC_GEN_WRAP = 4
+ INTRINSIC_UNARY_POSITIVE = 5
+ INTRINSIC_LIST_TO_TUPLE = 6
+ INTRINSIC_TYPEVAR = 7
+ INTRINSIC_PARAMSPEC = 8
+ INTRINSIC_TYPEVARTUPLE = 9
+ INTRINSIC_SUBSCRIPT_GENERIC = 10
+ INTRINSIC_TYPEALIAS = 11
+
+
+@enum.unique
+class Intrinsic2Op(enum.IntEnum):
+ INTRINSIC_2_INVALID = 0
+ INTRINSIC_PREP_RERAISE_STAR = 1
+ INTRINSIC_TYPEVAR_WITH_BOUND = 2
+ INTRINSIC_TYPEVAR_WITH_CONSTRAINTS = 3
+ INTRINSIC_SET_FUNCTION_TYPE_PARAMS = 4
+
+
+# This makes type checking happy but means it won't catch attempts to manipulate
+# an UNSET value statically. We would need guards on object attributes narrowed
+# down through methods
+class _UNSET(int):
+ instance = None
+
+ def __new__(cls):
+ if cls.instance is None:
+ cls.instance = super().__new__(cls)
+ return cls.instance
+
+ def __eq__(self, other) -> bool:
+ return self is other
+
+
+for op in [
+ "__abs__",
+ "__add__",
+ "__and__",
+ "__bool__",
+ "__ceil__",
+ "__divmod__",
+ "__float__",
+ "__floor__",
+ "__floordiv__",
+ "__ge__",
+ "__gt__",
+ "__hash__",
+ "__index__",
+ "__int__",
+ "__invert__",
+ "__le__",
+ "__lshift__",
+ "__lt__",
+ "__mod__",
+ "__mul__",
+ "__ne__",
+ "__neg__",
+ "__or__",
+ "__pos__",
+ "__pow__",
+ "__radd__",
+ "__rand__",
+ "__rdivmod__",
+ "__rfloordiv__",
+ "__rlshift__",
+ "__rmod__",
+ "__rmul__",
+ "__ror__",
+ "__round__",
+ "__rpow__",
+ "__rrshift__",
+ "__rshift__",
+ "__rsub__",
+ "__rtruediv__",
+ "__rxor__",
+ "__sub__",
+ "__truediv__",
+ "__trunc__",
+ "__xor__",
+]:
+ setattr(_UNSET, op, lambda *args: NotImplemented)
+
+
+UNSET = _UNSET()
+
+
+def const_key(obj: Any) -> Union[bytes, Tuple[type, int]]:
+ try:
+ return _dumps(obj)
+ except ValueError:
+        # For other types, we use the object identity as a unique identifier
+        # to ensure that they are seen as unequal.
+ return (type(obj), id(obj))
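+
+
+# For instance (illustrative): const_key keeps 1, 1.0 and True distinct since
+# marshal tags each with a different type, while unmarshalable objects fall
+# back to identity via (type(obj), id(obj)).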
+
+
+class Label:
+ __slots__ = ()
+
+
+#: Placeholder label temporarily used when performing some conversions
+#: concrete -> bytecode
+PLACEHOLDER_LABEL = Label()
+
+
+class _Variable:
+ __slots__ = ("name",)
+
+ def __init__(self, name: str) -> None:
+ self.name: str = name
+
+ def __eq__(self, other: Any) -> bool:
+ if type(self) is not type(other):
+ return False
+ return self.name == other.name
+
+ def __str__(self) -> str:
+ return self.name
+
+ def __repr__(self) -> str:
+ return "<%s %r>" % (self.__class__.__name__, self.name)
+
+
+class CellVar(_Variable):
+ __slots__ = ()
+
+
+class FreeVar(_Variable):
+ __slots__ = ()
+
+
+def _check_arg_int(arg: Any, name: str) -> TypeGuard[int]:
+ if not isinstance(arg, int):
+ raise TypeError(
+ "operation %s argument must be an int, "
+ "got %s" % (name, type(arg).__name__)
+ )
+
+ if not (0 <= arg <= 2147483647):
+ raise ValueError(
+ "operation %s argument must be in " "the range 0..2,147,483,647" % name
+ )
+
+ return True
+
+
+if sys.version_info >= (3, 12):
+
+ def opcode_has_argument(opcode: int) -> bool:
+ return opcode in dis.hasarg
+
+else:
+
+ def opcode_has_argument(opcode: int) -> bool:
+ return opcode >= dis.HAVE_ARGUMENT
+
+
+# --- Instruction stack effect impact
+
+# We split the stack effect between the manipulations done on the stack before
+# executing the instruction (fetching the elements that are going to be used)
+# and what is pushed back on the stack after the execution is complete.
+
+# Stack effects that do not depend on the argument of the instruction
+STATIC_STACK_EFFECTS: Dict[str, Tuple[int, int]] = {
+ "ROT_TWO": (-2, 2),
+ "ROT_THREE": (-3, 3),
+ "ROT_FOUR": (-4, 4),
+ "DUP_TOP": (-1, 2),
+ "DUP_TOP_TWO": (-2, 4),
+ "GET_LEN": (-1, 2),
+ "GET_ITER": (-1, 1),
+ "GET_YIELD_FROM_ITER": (-1, 1),
+ "GET_AWAITABLE": (-1, 1),
+ "GET_AITER": (-1, 1),
+ "GET_ANEXT": (-1, 2),
+ "LIST_TO_TUPLE": (-1, 1),
+ "LIST_EXTEND": (-2, 1),
+ "SET_UPDATE": (-2, 1),
+ "DICT_UPDATE": (-2, 1),
+ "DICT_MERGE": (-2, 1),
+ "COMPARE_OP": (-2, 1),
+ "IS_OP": (-2, 1),
+ "CONTAINS_OP": (-2, 1),
+ "IMPORT_NAME": (-2, 1),
+ "ASYNC_GEN_WRAP": (-1, 1),
+ "PUSH_EXC_INFO": (-1, 2),
+ # Pop TOS and push TOS.__aexit__ and result of TOS.__aenter__()
+ "BEFORE_ASYNC_WITH": (-1, 2),
+ # Replace TOS based on TOS and TOS1
+ "IMPORT_FROM": (-1, 2),
+ "COPY_DICT_WITHOUT_KEYS": (-2, 2),
+    # Call a function at position 7 (4 on 3.11+) on the stack and push the return value
+ "WITH_EXCEPT_START": (-4, 5) if sys.version_info >= (3, 11) else (-7, 8),
+ # Starting with Python 3.11 MATCH_CLASS does not push a boolean anymore
+ "MATCH_CLASS": (-3, 1 if sys.version_info >= (3, 11) else 2),
+ "MATCH_MAPPING": (-1, 2),
+ "MATCH_SEQUENCE": (-1, 2),
+ "MATCH_KEYS": (-2, 3 if sys.version_info >= (3, 11) else 4),
+ "CHECK_EXC_MATCH": (-2, 2), # (TOS1, TOS) -> (TOS1, bool)
+ "CHECK_EG_MATCH": (-2, 2), # (TOS, TOS1) -> non-matched, matched or TOS1, None)
+ "PREP_RERAISE_STAR": (-2, 1), # (TOS1, TOS) -> new exception group)
+ **{k: (-1, 1) for k in (o for o in _opcode.opmap if (o.startswith("UNARY_")))},
+ **{
+ k: (-2, 1)
+ for k in (
+ o
+ for o in _opcode.opmap
+ if (o.startswith("BINARY_") or o.startswith("INPLACE_"))
+ )
+ },
+ # Python 3.12 changes not covered by dis.stack_effect
+ "BINARY_SLICE": (-3, 1),
+ # "STORE_SLICE" handled by dis.stack_effect
+ "LOAD_FROM_DICT_OR_GLOBALS": (-1, 1),
+ "LOAD_FROM_DICT_OR_DEREF": (-1, 1),
+ "LOAD_INTRISIC_1": (-1, 1),
+ "LOAD_INTRISIC_2": (-2, 1),
+}
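+
+# Reading guide (illustrative): an entry such as "COMPARE_OP": (-2, 1) means
+# the instruction pops two operands before executing and pushes one result,
+# i.e. a net stack effect of -1.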
+
+
+DYNAMIC_STACK_EFFECTS: Dict[
+ str, Callable[[int, Any, Optional[bool]], Tuple[int, int]]
+] = {
+ # PRECALL pops all arguments (as per its stack effect) and leaves
+ # the callable and either self or NULL
+    # CALL pops the 2 above items and pushes the return value
+    # (when PRECALL does not exist it pops more, as encoded by the effect)
+ "CALL": lambda effect, arg, jump: (
+ -2 - arg if sys.version_info >= (3, 12) else -2,
+ 1,
+ ),
+ # 3.12 changed the behavior of LOAD_ATTR
+ "LOAD_ATTR": lambda effect, arg, jump: (-1, 1 + effect),
+ "LOAD_SUPER_ATTR": lambda effect, arg, jump: (-3, 3 + effect),
+ "SWAP": lambda effect, arg, jump: (-arg, arg),
+ "COPY": lambda effect, arg, jump: (-arg, arg + effect),
+ "ROT_N": lambda effect, arg, jump: (-arg, arg),
+ "SET_ADD": lambda effect, arg, jump: (-arg, arg - 1),
+ "LIST_APPEND": lambda effect, arg, jump: (-arg, arg - 1),
+ "MAP_ADD": lambda effect, arg, jump: (-arg, arg - 2),
+ "FORMAT_VALUE": lambda effect, arg, jump: (effect - 1, 1),
+ # FOR_ITER needs TOS to be an iterator, hence a prerequisite of 1 on the stack
+ "FOR_ITER": lambda effect, arg, jump: (effect, 0) if jump else (-1, 2),
+ **{
+        # Instr(UNPACK_*, n) pops 1 and pushes n
+ k: lambda effect, arg, jump: (-1, effect + 1)
+ for k in (
+ "UNPACK_SEQUENCE",
+ "UNPACK_EX",
+ )
+ },
+ **{
+ k: lambda effect, arg, jump: (effect - 1, 1)
+ for k in (
+ "MAKE_FUNCTION",
+ "CALL_FUNCTION",
+ "CALL_FUNCTION_EX",
+ "CALL_FUNCTION_KW",
+ "CALL_METHOD",
+ *(o for o in _opcode.opmap if o.startswith("BUILD_")),
+ )
+ },
+}
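+
+# For example, "SWAP" with arg=2 reports (-2, +2): both items are fetched
+# before the swapped pair is pushed back, even though the net effect is 0.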
+
+
+# --- Instruction location
+
+
+def _check_location(
+ location: Optional[int], location_name: str, min_value: int
+) -> None:
+ if location is None:
+ return
+ if not isinstance(location, int):
+ raise TypeError(f"{location_name} must be an int, got {type(location)}")
+ if location < min_value:
+ raise ValueError(
+ f"invalid {location_name}, expected >= {min_value}, got {location}"
+ )
+
+
+@dataclass(frozen=True)
+class InstrLocation:
+ """Location information for an instruction."""
+
+    #: Line number to which the instruction corresponds.
+    #: Optional so that a location of None on an instruction encodes an unset value.
+    lineno: Optional[int]
+
+    #: Line number at which the instruction ends (Python 3.11+ only)
+    end_lineno: Optional[int]
+
+    #: Column offset at which the instruction starts (Python 3.11+ only)
+    col_offset: Optional[int]
+
+    #: Column offset at which the instruction ends (Python 3.11+ only)
+    end_col_offset: Optional[int]
+
+ __slots__ = ["lineno", "end_lineno", "col_offset", "end_col_offset"]
+
+ def __init__(
+ self,
+ lineno: Optional[int],
+ end_lineno: Optional[int],
+ col_offset: Optional[int],
+ end_col_offset: Optional[int],
+ ) -> None:
+ # Needed because we want the class to be frozen
+ object.__setattr__(self, "lineno", lineno)
+ object.__setattr__(self, "end_lineno", end_lineno)
+ object.__setattr__(self, "col_offset", col_offset)
+ object.__setattr__(self, "end_col_offset", end_col_offset)
+ # In Python 3.11 0 is a valid lineno for some instructions (RESUME for example)
+ _check_location(lineno, "lineno", 0 if sys.version_info >= (3, 11) else 1)
+ _check_location(end_lineno, "end_lineno", 1)
+ _check_location(col_offset, "col_offset", 0)
+ _check_location(end_col_offset, "end_col_offset", 0)
+ if end_lineno:
+ if lineno is None:
+ raise ValueError("End lineno specified with no lineno.")
+ elif lineno > end_lineno:
+ raise ValueError(
+ f"End lineno {end_lineno} cannot be smaller than lineno {lineno}."
+ )
+
+ if col_offset is not None or end_col_offset is not None:
+ if lineno is None or end_lineno is None:
+ raise ValueError(
+ "Column offsets were specified but lineno information are "
+ f"incomplete. Lineno: {lineno}, end lineno: {end_lineno}."
+ )
+ if end_col_offset is not None:
+ if col_offset is None:
+ raise ValueError(
+ "End column offset specified with no column offset."
+ )
+                # Column offsets must be increasing within a single line but
+                # bear no relation to one another across different lines.
+ elif lineno == end_lineno and col_offset > end_col_offset:
+ raise ValueError(
+ f"End column offset {end_col_offset} cannot be smaller than "
+ f"column offset: {col_offset}."
+ )
+ else:
+ raise ValueError(
+ "No end column offset was specified but a column offset was given."
+ )
+
+ @classmethod
+ def from_positions(cls, position: "dis.Positions") -> "InstrLocation": # type: ignore
+ return InstrLocation(
+ position.lineno,
+ position.end_lineno,
+ position.col_offset,
+ position.end_col_offset,
+ )
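+
+    # Usage sketch (Python 3.11+): positions reported by dis can be converted
+    # directly, e.g.
+    #   loc = InstrLocation.from_positions(next(dis.get_instructions(f)).positions)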
+
+
+class SetLineno:
+ __slots__ = ("_lineno",)
+
+ def __init__(self, lineno: int) -> None:
+ # In Python 3.11 0 is a valid lineno for some instructions (RESUME for example)
+ _check_location(lineno, "lineno", 0 if sys.version_info >= (3, 11) else 1)
+ self._lineno: int = lineno
+
+ @property
+ def lineno(self) -> int:
+ return self._lineno
+
+ def __eq__(self, other: Any) -> bool:
+ if not isinstance(other, SetLineno):
+ return False
+ return self._lineno == other._lineno
+
+
+# --- Pseudo instructions used to represent exception handling (3.11+)
+
+
+class TryBegin:
+ __slots__ = ("target", "push_lasti", "stack_depth")
+
+ def __init__(
+ self,
+ target: Union[Label, "_bytecode.BasicBlock"],
+ push_lasti: bool,
+ stack_depth: Union[int, _UNSET] = UNSET,
+ ) -> None:
+ self.target: Union[Label, "_bytecode.BasicBlock"] = target
+ self.push_lasti: bool = push_lasti
+ self.stack_depth: Union[int, _UNSET] = stack_depth
+
+ def copy(self) -> "TryBegin":
+ return TryBegin(self.target, self.push_lasti, self.stack_depth)
+
+
+class TryEnd:
+ __slots__ = "entry"
+
+ def __init__(self, entry: TryBegin) -> None:
+ self.entry: TryBegin = entry
+
+ def copy(self) -> "TryEnd":
+ return TryEnd(self.entry)
+
+
+T = TypeVar("T", bound="BaseInstr")
+A = TypeVar("A", bound=object)
+
+
+class BaseInstr(Generic[A]):
+ """Abstract instruction."""
+
+ __slots__ = ("_name", "_opcode", "_arg", "_location")
+
+ # Work around an issue with the default value of arg
+ def __init__(
+ self,
+ name: str,
+ arg: A = UNSET, # type: ignore
+ *,
+ lineno: Union[int, None, _UNSET] = UNSET,
+ location: Optional[InstrLocation] = None,
+ ) -> None:
+ self._set(name, arg)
+ if location:
+ self._location = location
+ elif lineno is UNSET:
+ self._location = None
+ else:
+ self._location = InstrLocation(lineno, None, None, None)
+
+ # Work around an issue with the default value of arg
+ def set(self, name: str, arg: A = UNSET) -> None: # type: ignore
+ """Modify the instruction in-place.
+
+ Replace name and arg attributes. Don't modify lineno.
+
+ """
+ self._set(name, arg)
+
+ def require_arg(self) -> bool:
+ """Does the instruction require an argument?"""
+ return opcode_has_argument(self._opcode)
+
+ @property
+ def name(self) -> str:
+ return self._name
+
+ @name.setter
+ def name(self, name: str) -> None:
+ self._set(name, self._arg)
+
+ @property
+ def opcode(self) -> int:
+ return self._opcode
+
+ @opcode.setter
+ def opcode(self, op: int) -> None:
+ if not isinstance(op, int):
+ raise TypeError("operator code must be an int")
+ if 0 <= op <= 255:
+ name = _opcode.opname[op]
+ valid = name != "<%r>" % op
+ else:
+ valid = False
+ if not valid:
+ raise ValueError("invalid operator code")
+
+ self._set(name, self._arg)
+
+ @property
+ def arg(self) -> A:
+ return self._arg
+
+ @arg.setter
+ def arg(self, arg: A):
+ self._set(self._name, arg)
+
+ @property
+ def lineno(self) -> Union[int, _UNSET, None]:
+ return self._location.lineno if self._location is not None else UNSET
+
+ @lineno.setter
+ def lineno(self, lineno: Union[int, _UNSET, None]) -> None:
+ loc = self._location
+ if loc and (
+ loc.end_lineno is not None
+ or loc.col_offset is not None
+ or loc.end_col_offset is not None
+ ):
+ raise RuntimeError(
+ "The lineno of an instruction with detailed location information "
+ "cannot be set."
+ )
+
+ if lineno is UNSET:
+ self._location = None
+ else:
+ self._location = InstrLocation(lineno, None, None, None)
+
+ @property
+ def location(self) -> Optional[InstrLocation]:
+ return self._location
+
+ @location.setter
+ def location(self, location: Optional[InstrLocation]) -> None:
+ if location and not isinstance(location, InstrLocation):
+ raise TypeError(
+ "The instr location must be an instance of InstrLocation or None."
+ )
+ self._location = location
+
+ def stack_effect(self, jump: Optional[bool] = None) -> int:
+ if not self.require_arg():
+ arg = None
+        # In 3.11 the LOAD_GLOBAL arg encodes whether or not we push a NULL
+ # 3.12 does the same for LOAD_ATTR
+ elif self.name in BITFLAG_INSTRUCTIONS and isinstance(self._arg, tuple):
+ assert len(self._arg) == 2
+ arg = self._arg[0]
+ # 3.12 does a similar trick for LOAD_SUPER_ATTR
+ elif self.name in BITFLAG2_INSTRUCTIONS and isinstance(self._arg, tuple):
+ assert len(self._arg) == 3
+ arg = self._arg[0]
+ elif not isinstance(self._arg, int) or self._opcode in _opcode.hasconst:
+ # Argument is either a non-integer or an integer constant,
+ # not oparg.
+ arg = 0
+ else:
+ arg = self._arg
+
+ return dis.stack_effect(self._opcode, arg, jump=jump)
+
+ def pre_and_post_stack_effect(self, jump: Optional[bool] = None) -> Tuple[int, int]:
+        # Allows checking that execution will not cause a stack underflow
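+        # e.g. BUILD_LIST with arg=3 has a net effect of -2 but is reported as
+        # (-3, +1): three items are consumed before the new list is pushed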
+ _effect = self.stack_effect(jump=jump)
+
+ n = self.name
+ if n in STATIC_STACK_EFFECTS:
+ return STATIC_STACK_EFFECTS[n]
+ elif n in DYNAMIC_STACK_EFFECTS:
+ return DYNAMIC_STACK_EFFECTS[n](_effect, self.arg, jump)
+ else:
+            # For instructions with no special handling we conservatively
+            # consider the whole effect to apply before execution
+ return (_effect, 0)
+
+ def copy(self: T) -> T:
+ return self.__class__(self._name, self._arg, location=self._location)
+
+ def has_jump(self) -> bool:
+ return self._has_jump(self._opcode)
+
+ def is_cond_jump(self) -> bool:
+ """Is a conditional jump?"""
+ # Ex: POP_JUMP_IF_TRUE, JUMP_IF_FALSE_OR_POP
+        # In 3.11+ JUMP and IF are not necessarily adjacent in the name.
+ name = self._name
+ return "JUMP_" in name and "IF_" in name
+
+ def is_uncond_jump(self) -> bool:
+ """Is an unconditional jump?"""
+        # JUMP_BACKWARD was introduced in 3.11
+        # JUMP_ABSOLUTE was removed in 3.11
+ return self.name in {
+ "JUMP_FORWARD",
+ "JUMP_ABSOLUTE",
+ "JUMP_BACKWARD",
+ "JUMP_BACKWARD_NO_INTERRUPT",
+ }
+
+ def is_abs_jump(self) -> bool:
+ """Is an absolute jump."""
+ return self._opcode in _opcode.hasjabs
+
+ def is_forward_rel_jump(self) -> bool:
+ """Is a forward relative jump."""
+ return self._opcode in _opcode.hasjrel and "BACKWARD" not in self._name
+
+ def is_backward_rel_jump(self) -> bool:
+ """Is a backward relative jump."""
+ return self._opcode in _opcode.hasjrel and "BACKWARD" in self._name
+
+ def is_final(self) -> bool:
+ if self._name in {
+ "RETURN_VALUE",
+ "RETURN_CONST",
+ "RAISE_VARARGS",
+ "RERAISE",
+ "BREAK_LOOP",
+ "CONTINUE_LOOP",
+ }:
+ return True
+ if self.is_uncond_jump():
+ return True
+ return False
+
+ def __repr__(self) -> str:
+ if self._arg is not UNSET:
+ return "<%s arg=%r location=%s>" % (self._name, self._arg, self._location)
+ else:
+ return "<%s location=%s>" % (self._name, self._location)
+
+ def __eq__(self, other: Any) -> bool:
+ if type(self) is not type(other):
+ return False
+ return self._cmp_key() == other._cmp_key()
+
+ # --- Private API
+
+ _name: str
+
+ _location: Optional[InstrLocation]
+
+ _opcode: int
+
+ _arg: A
+
+ def _set(self, name: str, arg: A) -> None:
+ if not isinstance(name, str):
+ raise TypeError("operation name must be a str")
+ try:
+ opcode = _opcode.opmap[name]
+ except KeyError:
+ raise ValueError(f"invalid operation name: {name}")
+
+ if opcode >= MIN_INSTRUMENTED_OPCODE:
+ raise ValueError(
+ f"operation {name} is an instrumented or pseudo opcode. "
+ "Only base opcodes are supported"
+ )
+
+ self._check_arg(name, opcode, arg)
+
+ self._name = name
+ self._opcode = opcode
+ self._arg = arg
+
+ @staticmethod
+ def _has_jump(opcode) -> bool:
+ return opcode in _opcode.hasjrel or opcode in _opcode.hasjabs
+
+ @abstractmethod
+ def _check_arg(self, name: str, opcode: int, arg: A) -> None:
+ pass
+
+ @abstractmethod
+ def _cmp_key(self) -> Tuple[Optional[InstrLocation], str, Any]:
+ pass
+
+
+InstrArg = Union[
+ int,
+ str,
+ Label,
+ CellVar,
+ FreeVar,
+ "_bytecode.BasicBlock",
+ Compare,
+ Tuple[bool, str],
+ Tuple[bool, bool, str],
+]
+
+
+class Instr(BaseInstr[InstrArg]):
+ __slots__ = ()
+
+ def _cmp_key(self) -> Tuple[Optional[InstrLocation], str, Any]:
+ arg: Any = self._arg
+ if self._opcode in _opcode.hasconst:
+ arg = const_key(arg)
+ return (self._location, self._name, arg)
+
+ def _check_arg(self, name: str, opcode: int, arg: InstrArg) -> None:
+ if name == "EXTENDED_ARG":
+ raise ValueError(
+ "only concrete instruction can contain EXTENDED_ARG, "
+ "highlevel instruction can represent arbitrary argument without it"
+ )
+
+ if opcode_has_argument(opcode):
+ if arg is UNSET:
+ raise ValueError("operation %s requires an argument" % name)
+ else:
+ if arg is not UNSET:
+ raise ValueError("operation %s has no argument" % name)
+
+ if self._has_jump(opcode):
+ if not isinstance(arg, (Label, _bytecode.BasicBlock)):
+ raise TypeError(
+ "operation %s argument type must be "
+ "Label or BasicBlock, got %s" % (name, type(arg).__name__)
+ )
+
+ elif opcode in _opcode.hasfree:
+ if not isinstance(arg, (CellVar, FreeVar)):
+ raise TypeError(
+ "operation %s argument must be CellVar "
+ "or FreeVar, got %s" % (name, type(arg).__name__)
+ )
+
+ elif opcode in _opcode.haslocal or opcode in _opcode.hasname:
+ if name in BITFLAG_INSTRUCTIONS:
+ if not (
+ isinstance(arg, tuple)
+ and len(arg) == 2
+ and isinstance(arg[0], bool)
+ and isinstance(arg[1], str)
+ ):
+ raise TypeError(
+ "operation %s argument must be a tuple[bool, str], "
+ "got %s (value=%s)" % (name, type(arg).__name__, str(arg))
+ )
+
+ elif name in BITFLAG2_INSTRUCTIONS:
+ if not (
+ isinstance(arg, tuple)
+ and len(arg) == 3
+ and isinstance(arg[0], bool)
+ and isinstance(arg[1], bool)
+ and isinstance(arg[2], str)
+ ):
+ raise TypeError(
+ "operation %s argument must be a tuple[bool, bool, str], "
+ "got %s (value=%s)" % (name, type(arg).__name__, str(arg))
+ )
+
+ elif not isinstance(arg, str):
+ raise TypeError(
+ "operation %s argument must be a str, "
+ "got %s" % (name, type(arg).__name__)
+ )
+
+ elif opcode in _opcode.hasconst:
+ if isinstance(arg, Label):
+ raise ValueError(
+ "label argument cannot be used " "in %s operation" % name
+ )
+ if isinstance(arg, _bytecode.BasicBlock):
+ raise ValueError(
+ "block argument cannot be used " "in %s operation" % name
+ )
+
+ elif opcode in _opcode.hascompare:
+ if not isinstance(arg, Compare):
+ raise TypeError(
+ "operation %s argument type must be "
+ "Compare, got %s" % (name, type(arg).__name__)
+ )
+
+ elif opcode in INTRINSIC_1OP:
+ if not isinstance(arg, Intrinsic1Op):
+ raise TypeError(
+ "operation %s argument type must be "
+ "Intrinsic1Op, got %s" % (name, type(arg).__name__)
+ )
+
+ elif opcode in INTRINSIC_2OP:
+ if not isinstance(arg, Intrinsic2Op):
+ raise TypeError(
+ "operation %s argument type must be "
+ "Intrinsic2Op, got %s" % (name, type(arg).__name__)
+ )
+
+ elif opcode_has_argument(opcode):
+ _check_arg_int(arg, name)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode/py.typed b/lambdas/aws-dd-forwarder-3.127.0/bytecode/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/bytecode/version.py b/lambdas/aws-dd-forwarder-3.127.0/bytecode/version.py
new file mode 100644
index 0000000..2d91554
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/bytecode/version.py
@@ -0,0 +1,19 @@
+# This file is auto-generated by setuptools-scm do NOT edit it.
+
+from collections import namedtuple
+
+#: A namedtuple of the version info for the current release.
+_version_info = namedtuple("_version_info", "major minor micro status")
+
+parts = "0.15.1".split(".", 3)
+version_info = _version_info(
+ int(parts[0]),
+ int(parts[1]),
+ int(parts[2]),
+ parts[3] if len(parts) == 4 else "",
+)
+
+# Remove everything but the 'version_info' from this module.
+del namedtuple, _version_info, parts
+
+__version__ = "0.15.1"
diff --git a/lambdas/aws-dd-forwarder-3.127.0/caching/base_tags_cache.py b/lambdas/aws-dd-forwarder-3.127.0/caching/base_tags_cache.py
new file mode 100644
index 0000000..c38aa00
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/caching/base_tags_cache.py
@@ -0,0 +1,172 @@
+import json
+import logging
+import os
+from random import randint
+from time import time
+
+import boto3
+from botocore.exceptions import ClientError
+
+from caching.common import get_last_modified_time
+from settings import (
+ DD_S3_BUCKET_NAME,
+ DD_S3_CACHE_DIRNAME,
+ DD_S3_CACHE_LOCK_TTL_SECONDS,
+ DD_TAGS_CACHE_TTL_SECONDS,
+)
+from telemetry import send_forwarder_internal_metrics
+
+JITTER_MIN = 1
+JITTER_MAX = 100
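+# Add jitter to the TTL so that concurrently running forwarder instances do
+# not all refresh (and rewrite) the shared S3 cache at the same moment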
+DD_TAGS_CACHE_TTL_SECONDS = DD_TAGS_CACHE_TTL_SECONDS + randint(JITTER_MIN, JITTER_MAX)
+
+
+class BaseTagsCache(object):
+ def __init__(
+ self,
+ prefix,
+ cache_filename,
+ cache_lock_filename,
+ tags_ttl_seconds=DD_TAGS_CACHE_TTL_SECONDS,
+ ):
+ self.cache_dirname = DD_S3_CACHE_DIRNAME
+ self.tags_ttl_seconds = tags_ttl_seconds
+ self.tags_by_id = {}
+ self.last_tags_fetch_time = 0
+ self.cache_prefix = prefix
+ self.cache_filename = cache_filename
+ self.cache_lock_filename = cache_lock_filename
+ self.logger = logging.getLogger()
+ self.logger.setLevel(
+ logging.getLevelName(os.environ.get("DD_LOG_LEVEL", "INFO").upper())
+ )
+ self.resource_tagging_client = boto3.client("resourcegroupstaggingapi")
+ self.s3_client = boto3.resource("s3")
+
+ def get_resources_paginator(self):
+ return self.resource_tagging_client.get_paginator("get_resources")
+
+ def get_cache_name_with_prefix(self):
+ return f"{self.cache_dirname}/{self.cache_prefix}_{self.cache_filename}"
+
+ def get_cache_lock_with_prefix(self):
+ return f"{self.cache_dirname}/{self.cache_prefix}_{self.cache_lock_filename}"
+
+ def write_cache_to_s3(self, data):
+ """Writes tags cache to s3"""
+ try:
+ self.logger.debug("Trying to write data to s3: {}".format(data))
+ s3_object = self.s3_client.Object(
+ DD_S3_BUCKET_NAME, self.get_cache_name_with_prefix()
+ )
+ s3_object.put(Body=(bytes(json.dumps(data).encode("UTF-8"))))
+ except ClientError:
+ send_forwarder_internal_metrics("s3_cache_write_failure")
+ self.logger.debug("Unable to write new cache to S3", exc_info=True)
+
+ def acquire_s3_cache_lock(self):
+ """Acquire cache lock"""
+ cache_lock_object = self.s3_client.Object(
+ DD_S3_BUCKET_NAME, self.get_cache_lock_with_prefix()
+ )
+ try:
+ file_content = cache_lock_object.get()
+
+ # check lock file expiration
+ last_modified_unix_time = get_last_modified_time(file_content)
+ if last_modified_unix_time + DD_S3_CACHE_LOCK_TTL_SECONDS >= time():
+ return False
+ except Exception:
+ self.logger.debug("Unable to get cache lock file")
+
+ # lock file doesn't exist, create file to acquire lock
+ try:
+ cache_lock_object.put(Body=(bytes("lock".encode("UTF-8"))))
+ send_forwarder_internal_metrics("s3_cache_lock_acquired")
+ self.logger.debug("S3 cache lock acquired")
+ except ClientError:
+ self.logger.debug("Unable to write S3 cache lock file", exc_info=True)
+ return False
+
+ return True
+
+ def release_s3_cache_lock(self):
+ """Release cache lock"""
+ try:
+ cache_lock_object = self.s3_client.Object(
+ DD_S3_BUCKET_NAME, self.get_cache_lock_with_prefix()
+ )
+ cache_lock_object.delete()
+ send_forwarder_internal_metrics("s3_cache_lock_released")
+ self.logger.debug("S3 cache lock released")
+ except ClientError:
+ send_forwarder_internal_metrics("s3_cache_lock_release_failure")
+ self.logger.debug("Unable to release S3 cache lock", exc_info=True)
+
+ def get_cache_from_s3(self):
+ """Retrieves tags cache from s3 and returns the body along with
+ the last modified datetime for the cache"""
+ cache_object = self.s3_client.Object(
+ DD_S3_BUCKET_NAME, self.get_cache_name_with_prefix()
+ )
+ try:
+ file_content = cache_object.get()
+ tags_cache = json.loads(file_content["Body"].read().decode("utf-8"))
+ last_modified_unix_time = get_last_modified_time(file_content)
+        except Exception:
+ send_forwarder_internal_metrics("s3_cache_fetch_failure")
+ self.logger.debug("Unable to fetch cache from S3", exc_info=True)
+ return {}, -1
+
+ return tags_cache, last_modified_unix_time
+
+ def _refresh(self):
+ """Populate the tags in the local cache by getting cache from s3
+ If cache not in s3, then cache is built using build_tags_cache
+ """
+ self.last_tags_fetch_time = time()
+
+ # If the custom tag fetch env var is not set to true do not fetch
+ if not self.should_fetch_tags():
+ self.logger.debug(
+ "Not fetching custom tags because the env variable for the cache {} is not set to true".format(
+ self.cache_filename
+ )
+ )
+ return
+
+ tags_fetched, last_modified = self.get_cache_from_s3()
+
+ if self._is_expired(last_modified):
+ send_forwarder_internal_metrics("s3_cache_expired")
+ self.logger.debug("S3 cache expired, rebuilding cache")
+ lock_acquired = self.acquire_s3_cache_lock()
+ if lock_acquired:
+ success, new_tags_fetched = self.build_tags_cache()
+ if success:
+ self.tags_by_id = new_tags_fetched
+ self.write_cache_to_s3(self.tags_by_id)
+ elif tags_fetched != {}:
+ self.tags_by_id = tags_fetched
+
+ self.release_s3_cache_lock()
+ # s3 cache fetch succeeded and isn't expired
+ elif last_modified > -1:
+ self.tags_by_id = tags_fetched
+
+ def _is_expired(self, last_modified=None):
+ """Returns bool for whether the fetch TTL has expired"""
+ if not last_modified:
+ last_modified = self.last_tags_fetch_time
+
+ earliest_time_to_refetch_tags = last_modified + self.tags_ttl_seconds
+ return time() > earliest_time_to_refetch_tags
+
+    def should_fetch_tags(self):
+        raise NotImplementedError("should_fetch_tags must be defined for tags caches")
+
+    def get(self, key):
+        raise NotImplementedError("get must be defined for tags caches")
+
+    def build_tags_cache(self):
+        raise NotImplementedError("build_tags_cache must be defined for tags caches")
diff --git a/lambdas/aws-dd-forwarder-3.127.0/caching/cache_layer.py b/lambdas/aws-dd-forwarder-3.127.0/caching/cache_layer.py
new file mode 100644
index 0000000..eef6a53
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/caching/cache_layer.py
@@ -0,0 +1,24 @@
+from caching.cloudwatch_log_group_cache import CloudwatchLogGroupTagsCache
+from caching.step_functions_cache import StepFunctionsTagsCache
+from caching.s3_tags_cache import S3TagsCache
+from caching.lambda_cache import LambdaTagsCache
+
+
+class CacheLayer:
+ def __init__(self, prefix):
+ self._cloudwatch_log_group_cache = CloudwatchLogGroupTagsCache(prefix)
+ self._s3_tags_cache = S3TagsCache(prefix)
+ self._step_functions_cache = StepFunctionsTagsCache(prefix)
+ self._lambda_cache = LambdaTagsCache(prefix)
+
+ def get_cloudwatch_log_group_tags_cache(self):
+ return self._cloudwatch_log_group_cache
+
+ def get_s3_tags_cache(self):
+ return self._s3_tags_cache
+
+ def get_step_functions_tags_cache(self):
+ return self._step_functions_cache
+
+ def get_lambda_tags_cache(self):
+ return self._lambda_cache
diff --git a/lambdas/aws-dd-forwarder-3.127.0/caching/cloudwatch_log_group_cache.py b/lambdas/aws-dd-forwarder-3.127.0/caching/cloudwatch_log_group_cache.py
new file mode 100644
index 0000000..f20d9a5
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/caching/cloudwatch_log_group_cache.py
@@ -0,0 +1,194 @@
+import json
+import logging
+import os
+from random import randint
+from time import time
+
+import boto3
+from botocore.config import Config
+
+from caching.common import sanitize_aws_tag_string
+from settings import (
+ DD_S3_BUCKET_NAME,
+ DD_S3_CACHE_DIRNAME,
+ DD_S3_LOG_GROUP_CACHE_DIRNAME,
+ DD_TAGS_CACHE_TTL_SECONDS,
+)
+from telemetry import send_forwarder_internal_metrics
+
+
+class CloudwatchLogGroupTagsCache:
+ def __init__(
+ self,
+ prefix,
+ ):
+ self.cache_dirname = f"{DD_S3_CACHE_DIRNAME}/{DD_S3_LOG_GROUP_CACHE_DIRNAME}"
+ self.cache_ttl_seconds = DD_TAGS_CACHE_TTL_SECONDS
+ self.bucket_name = DD_S3_BUCKET_NAME
+ self.cache_prefix = prefix
+ self.tags_by_log_group = {}
+ # We need to use the standard retry mode for the Cloudwatch Logs client that defaults to 3 retries
+ self.cloudwatch_logs_client = boto3.client(
+ "logs", config=Config(retries={"mode": "standard"})
+ )
+ self.s3_client = boto3.client("s3")
+
+ self.logger = logging.getLogger()
+ self.logger.setLevel(
+ logging.getLevelName(os.environ.get("DD_LOG_LEVEL", "INFO").upper())
+ )
+
+ # Initialize the cache
+ if self._should_fetch_tags():
+ self._build_tags_cache()
+
+ def get(self, log_group_arn):
+ """Get the tags for the Cloudwatch Log Group from the cache
+
+ Will refetch the tags if they are out of date, or a log group is encountered
+ which isn't in the tag list
+
+ Args:
+            log_group_arn (str): the ARN of the log group we're getting tags for
+
+ Returns:
+ log_group_tags (str[]): the list of "key:value" Datadog tag strings
+ """
+ # If the custom tag fetch env var is not set to true do not fetch tags
+ if not self._should_fetch_tags():
+ self.logger.debug(
+ "Not fetching custom tags because the env variable DD_FETCH_LOG_GROUP_TAGS is "
+ "not set to true"
+ )
+ return []
+
+ return self._fetch_log_group_tags(log_group_arn)
+
+ def _should_fetch_tags(self):
+ return os.environ.get("DD_FETCH_LOG_GROUP_TAGS", "false").lower() == "true"
+
+ def _build_tags_cache(self):
+ try:
+ prefix = self._get_cache_file_prefix()
+ response = self.s3_client.list_objects_v2(
+ Bucket=DD_S3_BUCKET_NAME, Prefix=prefix
+ )
+ cache_files = [content["Key"] for content in response.get("Contents", [])]
+ for cache_file in cache_files:
+ log_group_tags, last_modified = self._get_log_group_tags_from_cache(
+ cache_file
+ )
+ if log_group_tags and not self._is_expired(last_modified):
+ log_group = cache_file.split("/")[-1].split(".")[0]
+ self.tags_by_log_group[log_group] = {
+ "tags": log_group_tags,
+ "last_modified": last_modified,
+ }
+ self.logger.debug(
+ f"loggroup_tags_cache initialized successfully {self.tags_by_log_group}"
+ )
+ except Exception:
+ self.logger.exception("failed to build log group tags cache", exc_info=True)
+
+ def _fetch_log_group_tags(self, log_group_arn):
+ # first, check in-memory cache
+ log_group_tags_struct = self.tags_by_log_group.get(log_group_arn, None)
+ if log_group_tags_struct and not self._is_expired(
+ log_group_tags_struct.get("last_modified", None)
+ ):
+ send_forwarder_internal_metrics("loggroup_local_cache_hit")
+ return log_group_tags_struct.get("tags", [])
+
+ # then, check cache file, update and return
+ cache_file_name = self._get_cache_file_name(log_group_arn)
+ log_group_tags, last_modified = self._get_log_group_tags_from_cache(
+ cache_file_name
+ )
+ if log_group_tags and not self._is_expired(last_modified):
+ self.tags_by_log_group[log_group_arn] = {
+ "tags": log_group_tags,
+ "last_modified": time(),
+ }
+ send_forwarder_internal_metrics("loggroup_s3_cache_hit")
+ return log_group_tags
+
+ # finally, make an api call, update and return
+ log_group_tags = self._get_log_group_tags(log_group_arn) or []
+ self._update_log_group_tags_cache(log_group_arn, log_group_tags)
+ self.tags_by_log_group[log_group_arn] = {
+ "tags": log_group_tags,
+ "last_modified": time(),
+ }
+
+ return log_group_tags
+
+ def _get_log_group_tags_from_cache(self, cache_file_name):
+ try:
+ response = self.s3_client.get_object(
+ Bucket=self.bucket_name, Key=cache_file_name
+ )
+ tags_cache = json.loads(response.get("Body").read().decode("utf-8"))
+ last_modified_unix_time = int(response.get("LastModified").timestamp())
+ except Exception:
+ send_forwarder_internal_metrics("loggroup_cache_fetch_failure")
+            self.logger.exception("Failed to get log group tags from cache")
+ return None, -1
+
+ return tags_cache, last_modified_unix_time
+
+ def _update_log_group_tags_cache(self, log_group, tags):
+ cache_file_name = self._get_cache_file_name(log_group)
+ try:
+ self.s3_client.put_object(
+ Bucket=self.bucket_name,
+ Key=cache_file_name,
+ Body=(bytes(json.dumps(tags).encode("UTF-8"))),
+ )
+ except Exception:
+ send_forwarder_internal_metrics("loggroup_cache_write_failure")
+            self.logger.exception("Failed to update log group tags cache")
+
+ def _is_expired(self, last_modified):
+ if not last_modified:
+ return True
+
+ # add a random number of seconds to avoid having all tags refetched at the same time
+ earliest_time_to_refetch_tags = (
+ last_modified + self.cache_ttl_seconds + randint(1, 100)
+ )
+ return time() > earliest_time_to_refetch_tags
+
+ def _get_cache_file_name(self, log_group_arn):
+ log_group_name = log_group_arn.replace("/", "_").replace(":", "_")
+ return f"{self._get_cache_file_prefix()}/{log_group_name}.json"
+
+ def _get_cache_file_prefix(self):
+ return f"{self.cache_dirname}/{self.cache_prefix}"
+
+ def _get_log_group_tags(self, log_group_arn):
+ response = None
+ try:
+ send_forwarder_internal_metrics("list_tags_log_group_api_call")
+ response = self.cloudwatch_logs_client.list_tags_for_resource(
+ resourceArn=log_group_arn
+ )
+ except Exception:
+ self.logger.exception("Failed to get log group tags", exc_info=True)
+ formatted_tags = None
+ if response is not None:
+ formatted_tags = [
+ (
+ "{key}:{value}".format(
+ key=sanitize_aws_tag_string(k, remove_colons=True),
+ value=sanitize_aws_tag_string(v, remove_leading_digits=False),
+ )
+ if v
+ else sanitize_aws_tag_string(k, remove_colons=True)
+ )
+ for k, v in response["tags"].items()
+ ]
+ return formatted_tags
diff --git a/lambdas/aws-dd-forwarder-3.127.0/caching/common.py b/lambdas/aws-dd-forwarder-3.127.0/caching/common.py
new file mode 100644
index 0000000..7d7db88
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/caching/common.py
@@ -0,0 +1,103 @@
+import os
+import datetime
+import logging
+import re
+from collections import defaultdict
+
+logger = logging.getLogger()
+logger.setLevel(logging.getLevelName(os.environ.get("DD_LOG_LEVEL", "INFO").upper()))
+
+
+_other_chars = r"\w:\-\.\/"
+Sanitize = re.compile(r"[^%s]" % _other_chars, re.UNICODE).sub
+Dedupe = re.compile(r"_+", re.UNICODE).sub
+FixInit = re.compile(r"^[_\d]*", re.UNICODE).sub
+
+
+def get_last_modified_time(s3_file):
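+    # e.g. a last-modified header of "Mon, 01 Jan 2024 00:00:00 GMT" -> 1704067200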
+ last_modified_str = s3_file["ResponseMetadata"]["HTTPHeaders"]["last-modified"]
+ last_modified_date = datetime.datetime.strptime(
+ last_modified_str, "%a, %d %b %Y %H:%M:%S %Z"
+ )
+    # The header is expressed in GMT, so interpret it as UTC rather than via
+    # the platform-specific, local-time strftime("%s")
+    last_modified_unix_time = int(
+        last_modified_date.replace(tzinfo=datetime.timezone.utc).timestamp()
+    )
+ return last_modified_unix_time
+
+
+def parse_get_resources_response_for_tags_by_arn(get_resources_page):
+ """Parses a page of GetResources response for the mapping from ARN to tags
+
+ Args:
+        get_resources_page (dict): one page of the GetResources response.
+ Partial example:
+ {"ResourceTagMappingList": [{
+ 'ResourceARN': 'arn:aws:lambda:us-east-1:123497598159:function:my-test-lambda',
+ 'Tags': [{'Key': 'stage', 'Value': 'dev'}, {'Key': 'team', 'Value': 'serverless'}]
+ }]}
+
+ Returns:
+        tags_by_arn (dict): resource tag lists keyed by lowercased ARN
+ """
+ tags_by_arn = defaultdict(list)
+
+    aws_resource_tag_mappings = get_resources_page["ResourceTagMappingList"]
+    for aws_resource_tag_mapping in aws_resource_tag_mappings:
+ function_arn = aws_resource_tag_mapping["ResourceARN"]
+ lowercase_function_arn = function_arn.lower()
+
+ raw_aws_tags = aws_resource_tag_mapping["Tags"]
+ tags = map(get_dd_tag_string_from_aws_dict, raw_aws_tags)
+
+ tags_by_arn[lowercase_function_arn] += tags
+
+ return tags_by_arn
+
+
+def get_dd_tag_string_from_aws_dict(aws_key_value_tag_dict):
+ """Converts the AWS dict tag format to the dd key:value string format and truncates to 200 characters
+
+ Args:
+ aws_key_value_tag_dict (dict): the dict the GetResources endpoint returns for a tag
+ ex: { "Key": "creator", "Value": "swf"}
+
+ Returns:
+ key:value colon-separated string built from the dict
+ ex: "creator:swf"
+ """
+ key = sanitize_aws_tag_string(aws_key_value_tag_dict["Key"], remove_colons=True)
+ value = sanitize_aws_tag_string(
+ aws_key_value_tag_dict.get("Value"), remove_leading_digits=False
+ )
+ # Value is optional in DD and AWS
+ if not value:
+ return key
+ return f"{key}:{value}"[0:200]
+
+
+def sanitize_aws_tag_string(tag, remove_colons=False, remove_leading_digits=True):
+ """Convert characters banned from DD but allowed in AWS tags to underscores"""
+ global Sanitize, Dedupe, FixInit
+
+ # 1. Replace colons with _
+ # 2. Convert to all lowercase unicode string
+ # 3. Convert bad characters to underscores
+ # 4. Dedupe contiguous underscores
+ # 5. Remove initial underscores/digits such that the string
+ # starts with an alpha char
+ # FIXME: tag normalization incorrectly supports tags starting
+ # with a ':', but this behavior should be phased out in future
+ # as it results in unqueryable data. See dogweb/#11193
+ # 6. Strip trailing underscores
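+    # e.g. sanitize_aws_tag_string("1Env:Prod/US", remove_colons=True)
+    # returns "env_prod/us"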
+
+ if len(tag) == 0:
+ # if tag is empty, nothing to do
+ return tag
+
+ if remove_colons:
+ tag = tag.replace(":", "_")
+ tag = Dedupe("_", Sanitize("_", tag.lower()))
+ if remove_leading_digits:
+ first_char = tag[0]
+ if first_char == "_" or "0" <= first_char <= "9":
+ tag = FixInit("", tag)
+ tag = tag.rstrip("_")
+ return tag
diff --git a/lambdas/aws-dd-forwarder-3.127.0/caching/lambda_cache.py b/lambdas/aws-dd-forwarder-3.127.0/caching/lambda_cache.py
new file mode 100644
index 0000000..e1d28e0
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/caching/lambda_cache.py
@@ -0,0 +1,90 @@
+import os
+
+from botocore.exceptions import ClientError
+
+from caching.base_tags_cache import BaseTagsCache
+from caching.common import parse_get_resources_response_for_tags_by_arn
+from settings import (
+ DD_S3_LAMBDA_CACHE_FILENAME,
+ DD_S3_LAMBDA_CACHE_LOCK_FILENAME,
+ GET_RESOURCES_LAMBDA_FILTER,
+)
+from telemetry import send_forwarder_internal_metrics
+
+
+class LambdaTagsCache(BaseTagsCache):
+ def __init__(self, prefix):
+ super().__init__(
+ prefix, DD_S3_LAMBDA_CACHE_FILENAME, DD_S3_LAMBDA_CACHE_LOCK_FILENAME
+ )
+
+ def should_fetch_tags(self):
+ return os.environ.get("DD_FETCH_LAMBDA_TAGS", "false").lower() == "true"
+
+ def build_tags_cache(self):
+ """Makes API calls to GetResources to get the live tags of the account's Lambda functions
+
+ Returns an empty dict instead of fetching custom tags if the tag fetch env variable is not set to true
+
+ Returns:
+ tags_by_arn_cache (dict): each Lambda's tags in a dict keyed by ARN
+ """
+ tags_fetch_success = False
+ tags_by_arn_cache = {}
+ resource_paginator = self.get_resources_paginator()
+
+ try:
+ for page in resource_paginator.paginate(
+ ResourceTypeFilters=[GET_RESOURCES_LAMBDA_FILTER], ResourcesPerPage=100
+ ):
+ send_forwarder_internal_metrics("get_resources_api_calls")
+ page_tags_by_arn = parse_get_resources_response_for_tags_by_arn(page)
+ tags_by_arn_cache.update(page_tags_by_arn)
+ tags_fetch_success = True
+
+ except ClientError as e:
+ self.logger.exception(
+ "Encountered a ClientError when trying to fetch tags. You may need to give "
+ "this Lambda's role the 'tag:GetResources' permission"
+ )
+ additional_tags = [
+ f"http_status_code:{e.response['ResponseMetadata']['HTTPStatusCode']}"
+ ]
+ send_forwarder_internal_metrics(
+ "client_error", additional_tags=additional_tags
+ )
+ tags_fetch_success = False
+
+ self.logger.debug(
+ "Built this tags cache from GetResources API calls: %s", tags_by_arn_cache
+ )
+
+ return tags_fetch_success, tags_by_arn_cache
+
+ def get(self, key):
+ """Get the tags for the Lambda function from the cache
+
+ Will refetch the tags if they are out of date, or a lambda arn is encountered
+ which isn't in the tag list
+
+ Note: the ARNs in the cache have been lowercased, so resource_arn must be lowercased
+
+ Args:
+ key (str): the key we're getting tags from the cache for
+
+ Returns:
+ lambda_tags (str[]): the list of "key:value" Datadog tag strings
+ """
+ if not self.should_fetch_tags():
+ self.logger.debug(
+ "Not fetching lambda function tags because the env variable DD_FETCH_LAMBDA_TAGS is "
+ "not set to true"
+ )
+ return []
+
+ if self._is_expired():
+ send_forwarder_internal_metrics("local_lambda_cache_expired")
+ self.logger.debug("Local cache expired, fetching cache from S3")
+ self._refresh()
+
+ return self.tags_by_id.get(key, [])
diff --git a/lambdas/aws-dd-forwarder-3.127.0/caching/s3_tags_cache.py b/lambdas/aws-dd-forwarder-3.127.0/caching/s3_tags_cache.py
new file mode 100644
index 0000000..b60c873
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/caching/s3_tags_cache.py
@@ -0,0 +1,64 @@
+from botocore.exceptions import ClientError
+from caching.base_tags_cache import BaseTagsCache
+from caching.common import parse_get_resources_response_for_tags_by_arn
+from telemetry import send_forwarder_internal_metrics
+from settings import (
+ DD_S3_TAGS_CACHE_FILENAME,
+ DD_S3_TAGS_CACHE_LOCK_FILENAME,
+ GET_RESOURCES_S3_FILTER,
+)
+
+
+class S3TagsCache(BaseTagsCache):
+ def __init__(self, prefix):
+ super().__init__(
+ prefix, DD_S3_TAGS_CACHE_FILENAME, DD_S3_TAGS_CACHE_LOCK_FILENAME
+ )
+
+ def should_fetch_tags(self):
+ return True
+
+ def build_tags_cache(self):
+ """Makes API calls to GetResources to get the live tags of the account's S3 buckets
+ Returns an empty dict instead of fetching custom tags if the tag fetch env variable is not set to true
+ Returns:
+ tags_by_arn_cache (dict): each S3 bucket's tags in a dict keyed by ARN
+ """
+ tags_fetch_success = False
+ tags_by_arn_cache = {}
+ resource_paginator = self.get_resources_paginator()
+
+ try:
+ for page in resource_paginator.paginate(
+ ResourceTypeFilters=[GET_RESOURCES_S3_FILTER], ResourcesPerPage=100
+ ):
+ send_forwarder_internal_metrics("get_s3_resources_api_calls")
+ page_tags_by_arn = parse_get_resources_response_for_tags_by_arn(page)
+ tags_by_arn_cache.update(page_tags_by_arn)
+ tags_fetch_success = True
+ except ClientError as e:
+ self.logger.exception(
+ "Encountered a ClientError when trying to fetch tags. You may need to give "
+ "this Lambda's role the 'tag:GetResources' permission"
+ )
+ additional_tags = [
+ f"http_status_code:{e.response['ResponseMetadata']['HTTPStatusCode']}"
+ ]
+ send_forwarder_internal_metrics(
+ "client_error", additional_tags=additional_tags
+ )
+ tags_fetch_success = False
+
+ self.logger.debug(
+ "Built this tags cache from GetResources API calls: %s", tags_by_arn_cache
+ )
+
+ return tags_fetch_success, tags_by_arn_cache
+
+ def get(self, bucket_arn):
+ if self._is_expired():
+ send_forwarder_internal_metrics("local_s3_tags_cache_expired")
+ self.logger.debug("Local cache expired, fetching cache from S3")
+ self._refresh()
+
+ return self.tags_by_id.get(bucket_arn, [])
diff --git a/lambdas/aws-dd-forwarder-3.127.0/caching/step_functions_cache.py b/lambdas/aws-dd-forwarder-3.127.0/caching/step_functions_cache.py
new file mode 100644
index 0000000..4b2c497
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/caching/step_functions_cache.py
@@ -0,0 +1,144 @@
+import os
+from botocore.exceptions import ClientError
+from caching.base_tags_cache import BaseTagsCache
+from caching.common import (
+ sanitize_aws_tag_string,
+ parse_get_resources_response_for_tags_by_arn,
+)
+from telemetry import send_forwarder_internal_metrics
+from settings import (
+ DD_S3_STEP_FUNCTIONS_CACHE_FILENAME,
+ DD_S3_STEP_FUNCTIONS_CACHE_LOCK_FILENAME,
+ GET_RESOURCES_STEP_FUNCTIONS_FILTER,
+)
+
+
+class StepFunctionsTagsCache(BaseTagsCache):
+ def __init__(self, prefix):
+ super().__init__(
+ prefix,
+ DD_S3_STEP_FUNCTIONS_CACHE_FILENAME,
+ DD_S3_STEP_FUNCTIONS_CACHE_LOCK_FILENAME,
+ )
+
+ def should_fetch_tags(self):
+ return os.environ.get("DD_FETCH_STEP_FUNCTIONS_TAGS", "false").lower() == "true"
+
+ def build_tags_cache(self):
+ """Makes API calls to GetResources to get the live tags of the account's Step Functions
+ Returns an empty dict instead of fetching custom tags if the tag fetch env variable is not
+ set to true.
+ Returns:
+            tags_by_arn_cache (dict): each Step Function's tags in a dict keyed by ARN
+ """
+ tags_fetch_success = False
+ tags_by_arn_cache = {}
+ get_resources_paginator = self.get_resources_paginator()
+
+ try:
+ for page in get_resources_paginator.paginate(
+ ResourceTypeFilters=[GET_RESOURCES_STEP_FUNCTIONS_FILTER],
+ ResourcesPerPage=100,
+ ):
+ send_forwarder_internal_metrics(
+ "step_functions_get_resources_api_calls"
+ )
+ page_tags_by_arn = parse_get_resources_response_for_tags_by_arn(page)
+ tags_by_arn_cache.update(page_tags_by_arn)
+ tags_fetch_success = True
+
+ except ClientError as e:
+ self.logger.exception(
+ "Encountered a ClientError when trying to fetch tags. You may need to give "
+ "this Lambda's role the 'tag:GetResources' permission"
+ )
+ additional_tags = [
+ f"http_status_code:{e.response['ResponseMetadata']['HTTPStatusCode']}"
+ ]
+ send_forwarder_internal_metrics(
+ "client_error", additional_tags=additional_tags
+ )
+
+ self.logger.debug(
+ "All Step Functions tags refreshed: {}".format(tags_by_arn_cache)
+ )
+
+ return tags_fetch_success, tags_by_arn_cache
+
+ def get(self, state_machine_arn):
+ """Get the tags for the Step Functions from the cache
+
+        Will re-fetch the tags if they are out of date, or a state machine is
+        encountered which isn't in the tag list
+
+ Args:
+ state_machine_arn (str): the key we're getting tags from the cache for
+
+ Returns:
+ state_machine_tags (List[str]): the list of "key:value" Datadog tag strings
+ """
+ if self._is_expired():
+ send_forwarder_internal_metrics("local_step_functions_tags_cache_expired")
+            self.logger.debug(
+ "Local cache expired for Step Functions tags. Fetching cache from S3"
+ )
+ self._refresh()
+
+ state_machine_tags = self.tags_by_id.get(state_machine_arn, None)
+ if state_machine_tags is None:
+ # If the custom tag fetch env var is not set to true do not fetch
+ if not self.should_fetch_tags():
+ self.logger.debug(
+ "Not fetching custom tags because the env variable DD_FETCH_STEP_FUNCTIONS_TAGS"
+ " is not set to true"
+ )
+ return []
+ state_machine_tags = self._get_state_machine_tags(state_machine_arn) or []
+ self.tags_by_id[state_machine_arn] = state_machine_tags
+
+ return state_machine_tags
+
+ def _get_state_machine_tags(self, state_machine_arn: str):
+ """Return a list of tags of a state machine in dd format (max 200 chars)
+
+        Example response from the GetResources API:
+ {
+ "ResourceTagMappingList": [
+ {
+ "ResourceARN": "arn:aws:states:us-east-1:1234567890:stateMachine:example-machine",
+ "Tags": [
+ {
+ "Key": "ENV",
+ "Value": "staging"
+ }
+ ]
+ }
+ ]
+ }
+
+ Args:
+ state_machine_arn (str): the key we're getting tags from the cache for
+ Returns:
+            state_machine_tags (List[str]): e.g. ["k1:v1", "k2:v2"]
+ """
+ response = None
+ formatted_tags = []
+
+ try:
+ send_forwarder_internal_metrics("get_state_machine_tags")
+ response = self.resource_tagging_client.get_resources(
+ ResourceARNList=[state_machine_arn]
+ )
+ except Exception as e:
+ self.logger.exception(f"Failed to get Step Functions tags due to {e}")
+
+        if response and len(response.get("ResourceTagMappingList", [])) > 0:
+ resource_dict = response.get("ResourceTagMappingList")[0]
+ for a_tag in resource_dict.get("Tags", []):
+ key = sanitize_aws_tag_string(a_tag["Key"], remove_colons=True)
+ value = sanitize_aws_tag_string(
+ a_tag.get("Value"), remove_leading_digits=False
+ )
+ formatted_tags.append(f"{key}:{value}"[:200]) # same logic as lambda
+
+ return formatted_tags
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/.DS_Store b/lambdas/aws-dd-forwarder-3.127.0/cattr/.DS_Store
new file mode 100644
index 0000000..d86ee78
Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/cattr/.DS_Store differ
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/__init__.py
new file mode 100644
index 0000000..6c262fe
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/__init__.py
@@ -0,0 +1,25 @@
+from .converters import BaseConverter, Converter, GenConverter, UnstructureStrategy
+from .gen import override
+
+__all__ = (
+ "global_converter",
+ "unstructure",
+ "structure",
+ "structure_attrs_fromtuple",
+ "structure_attrs_fromdict",
+ "UnstructureStrategy",
+ "BaseConverter",
+ "Converter",
+ "GenConverter",
+ "override",
+)
+from cattrs import global_converter
+
+unstructure = global_converter.unstructure
+structure = global_converter.structure
+structure_attrs_fromtuple = global_converter.structure_attrs_fromtuple
+structure_attrs_fromdict = global_converter.structure_attrs_fromdict
+register_structure_hook = global_converter.register_structure_hook
+register_structure_hook_func = global_converter.register_structure_hook_func
+register_unstructure_hook = global_converter.register_unstructure_hook
+register_unstructure_hook_func = global_converter.register_unstructure_hook_func
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/converters.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/converters.py
new file mode 100644
index 0000000..4434fe5
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/converters.py
@@ -0,0 +1,8 @@
+from cattrs.converters import (
+ BaseConverter,
+ Converter,
+ GenConverter,
+ UnstructureStrategy,
+)
+
+__all__ = ["BaseConverter", "Converter", "GenConverter", "UnstructureStrategy"]
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/disambiguators.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/disambiguators.py
new file mode 100644
index 0000000..f10797a
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/disambiguators.py
@@ -0,0 +1,3 @@
+from cattrs.disambiguators import create_uniq_field_dis_func
+
+__all__ = ["create_uniq_field_dis_func"]
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/dispatch.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/dispatch.py
new file mode 100644
index 0000000..2474247
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/dispatch.py
@@ -0,0 +1,3 @@
+from cattrs.dispatch import FunctionDispatch, MultiStrategyDispatch
+
+__all__ = ["FunctionDispatch", "MultiStrategyDispatch"]
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/errors.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/errors.py
new file mode 100644
index 0000000..af092e9
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/errors.py
@@ -0,0 +1,15 @@
+from cattrs.errors import (
+ BaseValidationError,
+ ClassValidationError,
+ ForbiddenExtraKeysError,
+ IterableValidationError,
+ StructureHandlerNotFoundError,
+)
+
+__all__ = [
+ "BaseValidationError",
+ "ClassValidationError",
+ "ForbiddenExtraKeysError",
+ "IterableValidationError",
+ "StructureHandlerNotFoundError",
+]
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/gen.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/gen.py
new file mode 100644
index 0000000..b1f63b5
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/gen.py
@@ -0,0 +1,21 @@
+from cattrs.cols import iterable_unstructure_factory as make_iterable_unstructure_fn
+from cattrs.gen import (
+ make_dict_structure_fn,
+ make_dict_unstructure_fn,
+ make_hetero_tuple_unstructure_fn,
+ make_mapping_structure_fn,
+ make_mapping_unstructure_fn,
+ override,
+)
+from cattrs.gen._consts import AttributeOverride
+
+__all__ = [
+ "AttributeOverride",
+ "make_dict_structure_fn",
+ "make_dict_unstructure_fn",
+ "make_hetero_tuple_unstructure_fn",
+ "make_iterable_unstructure_fn",
+ "make_mapping_structure_fn",
+ "make_mapping_unstructure_fn",
+ "override",
+]
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/__init__.py
new file mode 100644
index 0000000..fa6ad35
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/__init__.py
@@ -0,0 +1,3 @@
+from cattrs.preconf import validate_datetime
+
+__all__ = ["validate_datetime"]
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/bson.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/bson.py
new file mode 100644
index 0000000..4ac9743
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/bson.py
@@ -0,0 +1,5 @@
+"""Preconfigured converters for bson."""
+
+from cattrs.preconf.bson import BsonConverter, configure_converter, make_converter
+
+__all__ = ["BsonConverter", "configure_converter", "make_converter"]
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/json.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/json.py
new file mode 100644
index 0000000..d590bd6
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/json.py
@@ -0,0 +1,5 @@
+"""Preconfigured converters for the stdlib json."""
+
+from cattrs.preconf.json import JsonConverter, configure_converter, make_converter
+
+__all__ = ["configure_converter", "JsonConverter", "make_converter"]
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/msgpack.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/msgpack.py
new file mode 100644
index 0000000..1a579d6
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/msgpack.py
@@ -0,0 +1,5 @@
+"""Preconfigured converters for msgpack."""
+
+from cattrs.preconf.msgpack import MsgpackConverter, configure_converter, make_converter
+
+__all__ = ["configure_converter", "make_converter", "MsgpackConverter"]
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/orjson.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/orjson.py
new file mode 100644
index 0000000..4450990
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/orjson.py
@@ -0,0 +1,5 @@
+"""Preconfigured converters for orjson."""
+
+from cattrs.preconf.orjson import OrjsonConverter, configure_converter, make_converter
+
+__all__ = ["configure_converter", "make_converter", "OrjsonConverter"]
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/pyyaml.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/pyyaml.py
new file mode 100644
index 0000000..63d39f1
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/pyyaml.py
@@ -0,0 +1,5 @@
+"""Preconfigured converters for pyyaml."""
+
+from cattrs.preconf.pyyaml import PyyamlConverter, configure_converter, make_converter
+
+__all__ = ["configure_converter", "make_converter", "PyyamlConverter"]
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/tomlkit.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/tomlkit.py
new file mode 100644
index 0000000..6add731
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/tomlkit.py
@@ -0,0 +1,5 @@
+"""Preconfigured converters for tomlkit."""
+
+from cattrs.preconf.tomlkit import TomlkitConverter, configure_converter, make_converter
+
+__all__ = ["configure_converter", "make_converter", "TomlkitConverter"]
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/ujson.py b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/ujson.py
new file mode 100644
index 0000000..ef85c47
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattr/preconf/ujson.py
@@ -0,0 +1,5 @@
+"""Preconfigured converters for ujson."""
+
+from cattrs.preconf.ujson import UjsonConverter, configure_converter, make_converter
+
+__all__ = ["configure_converter", "make_converter", "UjsonConverter"]
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattr/py.typed b/lambdas/aws-dd-forwarder-3.127.0/cattr/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/METADATA
new file mode 100644
index 0000000..0c6a750
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/METADATA
@@ -0,0 +1,161 @@
+Metadata-Version: 2.3
+Name: cattrs
+Version: 24.1.2
+Summary: Composable complex class support for attrs and dataclasses.
+Project-URL: Homepage, https://catt.rs
+Project-URL: Changelog, https://catt.rs/en/latest/history.html
+Project-URL: Bug Tracker, https://github.com/python-attrs/cattrs/issues
+Project-URL: Repository, https://github.com/python-attrs/cattrs
+Project-URL: Documentation, https://catt.rs/en/stable/
+Author-email: Tin Tvrtkovic
+License: MIT
+License-File: LICENSE
+Keywords: attrs,dataclasses,serialization
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Typing :: Typed
+Requires-Python: >=3.8
+Requires-Dist: attrs>=23.1.0
+Requires-Dist: exceptiongroup>=1.1.1; python_version < '3.11'
+Requires-Dist: typing-extensions!=4.6.3,>=4.1.0; python_version < '3.11'
+Provides-Extra: bson
+Requires-Dist: pymongo>=4.4.0; extra == 'bson'
+Provides-Extra: cbor2
+Requires-Dist: cbor2>=5.4.6; extra == 'cbor2'
+Provides-Extra: msgpack
+Requires-Dist: msgpack>=1.0.5; extra == 'msgpack'
+Provides-Extra: msgspec
+Requires-Dist: msgspec>=0.18.5; (implementation_name == 'cpython') and extra == 'msgspec'
+Provides-Extra: orjson
+Requires-Dist: orjson>=3.9.2; (implementation_name == 'cpython') and extra == 'orjson'
+Provides-Extra: pyyaml
+Requires-Dist: pyyaml>=6.0; extra == 'pyyaml'
+Provides-Extra: tomlkit
+Requires-Dist: tomlkit>=0.11.8; extra == 'tomlkit'
+Provides-Extra: ujson
+Requires-Dist: ujson>=5.7.0; extra == 'ujson'
+Description-Content-Type: text/markdown
+
+# *cattrs*: Flexible Object Serialization and Validation
+
+*Because validation belongs to the edges.*
+
+[![Documentation](https://img.shields.io/badge/Docs-Read%20The%20Docs-black)](https://catt.rs/)
+[![License: MIT](https://img.shields.io/badge/license-MIT-C06524)](https://github.com/hynek/stamina/blob/main/LICENSE)
+[![PyPI](https://img.shields.io/pypi/v/cattrs.svg)](https://pypi.python.org/pypi/cattrs)
+[![Supported Python Versions](https://img.shields.io/pypi/pyversions/cattrs.svg)](https://github.com/python-attrs/cattrs)
+[![Downloads](https://static.pepy.tech/badge/cattrs/month)](https://pepy.tech/project/cattrs)
+[![Coverage](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/Tinche/22405310d6a663164d894a2beab4d44d/raw/covbadge.json)](https://github.com/python-attrs/cattrs/actions/workflows/main.yml)
+
+---
+
+
+
+**cattrs** is a Swiss Army knife for (un)structuring and validating data in Python.
+In practice, that means it converts **unstructured dictionaries** into **proper classes** and back, while **validating** their contents.
+
+
+
+
+## Example
+
+
+
+_cattrs_ works best with [_attrs_](https://www.attrs.org/) classes, and [dataclasses](https://docs.python.org/3/library/dataclasses.html) where simple (un-)structuring works out of the box, even for nested data, without polluting your data model with serialization details:
+
+```python
+>>> from attrs import define
+>>> from cattrs import structure, unstructure
+>>> @define
+... class C:
+... a: int
+... b: list[str]
+>>> instance = structure({'a': 1, 'b': ['x', 'y']}, C)
+>>> instance
+C(a=1, b=['x', 'y'])
+>>> unstructure(instance)
+{'a': 1, 'b': ['x', 'y']}
+```
+
+
+
+
+Have a look at [*Why cattrs?*](https://catt.rs/en/latest/why.html) for more examples!
+
+
+
+## Features
+
+### Recursive Unstructuring
+
+- _attrs_ classes and dataclasses are converted into dictionaries in a way similar to `attrs.asdict()`, or into tuples in a way similar to `attrs.astuple()`.
+- Enumeration instances are converted to their values.
+- Other types are let through without conversion. This includes types such as integers, dictionaries, lists and instances of non-_attrs_ classes.
+- Custom converters for any type can be registered using `register_unstructure_hook`.
+
+
+### Recursive Structuring
+
+Converts unstructured data into structured data, recursively, according to your specification given as a type.
+The following types are supported:
+
+- `typing.Optional[T]` and its 3.10+ form, `T | None`.
+- `list[T]`, `typing.List[T]`, `typing.MutableSequence[T]`, `typing.Sequence[T]` convert to lists.
+- `tuple` and `typing.Tuple` (both variants, `tuple[T, ...]` and `tuple[X, Y, Z]`).
+- `set[T]`, `typing.MutableSet[T]`, and `typing.Set[T]` convert to sets.
+- `frozenset[T]` and `typing.FrozenSet[T]` convert to frozensets.
+- `dict[K, V]`, `typing.Dict[K, V]`, `typing.MutableMapping[K, V]`, and `typing.Mapping[K, V]` convert to dictionaries.
+- [`typing.TypedDict`](https://docs.python.org/3/library/typing.html#typing.TypedDict), ordinary and generic.
+- [`typing.NewType`](https://docs.python.org/3/library/typing.html#newtype)
+- [PEP 695 type aliases](https://docs.python.org/3/library/typing.html#type-aliases) on 3.12+
+- _attrs_ classes with simple attributes and the usual `__init__`[^simple].
+- All _attrs_ classes and dataclasses with the usual `__init__`, if their complex attributes have type metadata.
+- Unions of supported _attrs_ classes, given that all of the classes have a unique field.
+- Unions of anything, if you provide a disambiguation function for it.
+- Custom converters for any type can be registered using `register_structure_hook` (see the sketch below).
+
+[^simple]: Simple attributes are attributes that can be assigned unstructured data, like numbers, strings, and collections of unstructured data.
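+
+Custom hooks are plain callables. As a minimal sketch (using the top-level `register_structure_hook` helper), a hook that structures `datetime` values from ISO strings looks like this:
+
+```python
+>>> from datetime import datetime
+>>> from cattrs import register_structure_hook, structure
+>>> register_structure_hook(datetime, lambda value, _type: datetime.fromisoformat(value))
+>>> structure("2024-01-01T00:00:00", datetime)
+datetime.datetime(2024, 1, 1, 0, 0)
+```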
+
+
+### Batteries Included
+
+_cattrs_ comes with pre-configured converters for a number of serialization libraries, including JSON (standard library, [_orjson_](https://pypi.org/project/orjson/), [UltraJSON](https://pypi.org/project/ujson/)), [_msgpack_](https://pypi.org/project/msgpack/), [_cbor2_](https://pypi.org/project/cbor2/), [_bson_](https://pypi.org/project/bson/), [PyYAML](https://pypi.org/project/PyYAML/), [_tomlkit_](https://pypi.org/project/tomlkit/) and [_msgspec_](https://pypi.org/project/msgspec/) (supports only JSON at this time).
+
+For details, see the [cattrs.preconf package](https://catt.rs/en/stable/preconf.html).
+
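+For example, the preconfigured JSON converter already knows how to render `datetime`s (a minimal sketch of the preconf API):
+
+```python
+>>> from datetime import datetime, timezone
+>>> from cattrs.preconf.json import make_converter
+>>> converter = make_converter()
+>>> converter.dumps({'at': datetime(2024, 1, 1, tzinfo=timezone.utc)})
+'{"at": "2024-01-01T00:00:00+00:00"}'
+```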
+
+## Design Decisions
+
+_cattrs_ is based on a few fundamental design decisions:
+
+- Un/structuring rules are separate from the models.
+  This allows models to have a one-to-many relationship with un/structuring rules, and to create un/structuring rules for models which you do not own and cannot change.
+ (_cattrs_ can be configured to use un/structuring rules from models using the [`use_class_methods` strategy](https://catt.rs/en/latest/strategies.html#using-class-specific-structure-and-unstructure-methods).)
+- Invent as little as possible; reuse existing ordinary Python instead.
+  For example, _cattrs_ did not have a custom exception type for grouping exceptions until [`ExceptionGroup`s](https://docs.python.org/3/library/exceptions.html#ExceptionGroup) were sanctioned by Python itself.
+ A side-effect of this design decision is that, in a lot of cases, when you're solving _cattrs_ problems you're actually learning Python instead of learning _cattrs_.
+- Resist the temptation to guess.
+ If there are two ways of solving a problem, _cattrs_ should refuse to guess and let the user configure it themselves.
+
+A foolish consistency is the hobgoblin of little minds, so these decisions can be, and sometimes are, broken, but they have proven to be a good foundation.
+
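+As a small illustration of the second decision, structuring errors surface as standard `ExceptionGroup`s (via `ClassValidationError`) rather than a bespoke error hierarchy; a minimal sketch:
+
+```python
+>>> from attrs import define
+>>> from cattrs import structure
+>>> from cattrs.errors import ClassValidationError
+>>> @define
+... class C:
+...     a: int
+>>> try:
+...     structure({'a': 'not a number'}, C)
+... except ClassValidationError as exc:
+...     print(exc.exceptions[0])
+invalid literal for int() with base 10: 'not a number'
+```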
+
+
+## Credits
+
+Major credits to Hynek Schlawack for creating [attrs](https://attrs.org) and its predecessor, [characteristic](https://github.com/hynek/characteristic).
+
+_cattrs_ is tested with [Hypothesis](http://hypothesis.readthedocs.io/en/latest/), by David R. MacIver.
+
+_cattrs_ is benchmarked using [perf](https://github.com/haypo/perf) and [pytest-benchmark](https://pytest-benchmark.readthedocs.io/en/latest/index.html).
+
+This package was created with [Cookiecutter](https://github.com/audreyr/cookiecutter) and the [`audreyr/cookiecutter-pypackage`](https://github.com/audreyr/cookiecutter-pypackage) project template.
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/RECORD
new file mode 100644
index 0000000..f7b3ae3
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/RECORD
@@ -0,0 +1,96 @@
+cattr/__init__.py,sha256=pODFKaZ7MisyHe_XPc9X6KKG73mqduHUvQO142XwijY,906
+cattr/__pycache__/__init__.cpython-311.pyc,,
+cattr/__pycache__/converters.cpython-311.pyc,,
+cattr/__pycache__/disambiguators.cpython-311.pyc,,
+cattr/__pycache__/dispatch.cpython-311.pyc,,
+cattr/__pycache__/errors.cpython-311.pyc,,
+cattr/__pycache__/gen.cpython-311.pyc,,
+cattr/converters.py,sha256=rQhY4J8r7QTZh5WICuFe4GWO1v0DS3DgQ9r569zd6jg,192
+cattr/disambiguators.py,sha256=ugD1fq1Z5x1pGu5P1lMzcT-IEi1q7IfQJIHEdmg62vM,103
+cattr/dispatch.py,sha256=uVEOgHWR9Hn5tm-wIw-bDccqrxJByVi8yRKaYyvL67k,125
+cattr/errors.py,sha256=V4RhoCObwGrlaM3oyn1H_FYxGR8iAB9dG5NxFDYM548,343
+cattr/gen.py,sha256=hWyKoZ_d2D36Jz_npspyGw8s9pWtUA69sXf0R3uOvgM,597
+cattr/preconf/__init__.py,sha256=NqPE7uhVfcP-PggkUpsbfAutMo8oHjcoB1cvjgLft-s,78
+cattr/preconf/__pycache__/__init__.cpython-311.pyc,,
+cattr/preconf/__pycache__/bson.cpython-311.pyc,,
+cattr/preconf/__pycache__/json.cpython-311.pyc,,
+cattr/preconf/__pycache__/msgpack.cpython-311.pyc,,
+cattr/preconf/__pycache__/orjson.cpython-311.pyc,,
+cattr/preconf/__pycache__/pyyaml.cpython-311.pyc,,
+cattr/preconf/__pycache__/tomlkit.cpython-311.pyc,,
+cattr/preconf/__pycache__/ujson.cpython-311.pyc,,
+cattr/preconf/bson.py,sha256=Bn4hJxac7OthGg_CR4LCPeBp_fz4kx3QniBVOZhguGs,195
+cattr/preconf/json.py,sha256=HBxWOTqKI7HOlmt-GnN6_wjQz1VphRi70sAOEbx0A2Y,206
+cattr/preconf/msgpack.py,sha256=VXqynPel11_lX8uTg84-u27LQhCqL1OoiF-lTqnoAkQ,207
+cattr/preconf/orjson.py,sha256=fs8qDPDYSBba9D8ib9Df1WVZ8iZaRPQq7kDigAxp14E,203
+cattr/preconf/pyyaml.py,sha256=lhuKwHrcvr16WOtdW4Q0mgIRzB90v1hwZkFXtPKOvAw,203
+cattr/preconf/tomlkit.py,sha256=rk393txIBHeWR66LfnATPh9Im1EFAHPJvSEGGSP2c-8,207
+cattr/preconf/ujson.py,sha256=r6ufraKDqmKdetNZUKxLYVSGmuJ-ckc-UjGYvCamr9k,199
+cattr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cattrs-24.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+cattrs-24.1.2.dist-info/METADATA,sha256=Dw1BXPd1jf0ooO8yiPhPNKrkXvGklnIuiYPdELv-Ohk,8420
+cattrs-24.1.2.dist-info/RECORD,,
+cattrs-24.1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cattrs-24.1.2.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87
+cattrs-24.1.2.dist-info/licenses/LICENSE,sha256=9fudHt43qIykf0IMSZ3KD0oFvJk-Esd9I1IKrSkcAb8,1074
+cattrs/__init__.py,sha256=peO0_Q9AEguPCMjXlRH-Nj0CahcCw5CJmpnpKxsWKSQ,1835
+cattrs/__pycache__/__init__.cpython-311.pyc,,
+cattrs/__pycache__/_compat.cpython-311.pyc,,
+cattrs/__pycache__/_generics.cpython-311.pyc,,
+cattrs/__pycache__/cols.cpython-311.pyc,,
+cattrs/__pycache__/converters.cpython-311.pyc,,
+cattrs/__pycache__/disambiguators.cpython-311.pyc,,
+cattrs/__pycache__/dispatch.cpython-311.pyc,,
+cattrs/__pycache__/errors.cpython-311.pyc,,
+cattrs/__pycache__/fns.cpython-311.pyc,,
+cattrs/__pycache__/v.cpython-311.pyc,,
+cattrs/_compat.py,sha256=DmHUZNi_MnI2UKvNPxwr77zuMs5tl3zDM4rdJK7kJiI,17620
+cattrs/_generics.py,sha256=ymyDdLjXoYi_XPBA_f_-xJC7Bc8RGqoUcdlwTbB7xl8,718
+cattrs/cols.py,sha256=sB9NTOp8pGLMUxVicSHWpcX_4czrD1g5MdCJO0Ko5s0,8433
+cattrs/converters.py,sha256=nMxuapDj3Q75oW4sVXnYdIeHhodwzLNUcDcaIfKMLQM,53916
+cattrs/disambiguators.py,sha256=ljl73QtSB3MAGcl7-phAUR66b4yx_1ORYLb5fUgW8bY,6825
+cattrs/dispatch.py,sha256=fEE100tCqcqC_wl5y2FCdVEocLOuDlys0sduJrTfmB4,6810
+cattrs/errors.py,sha256=rHps9Qp7SoRafb2VuAkMbhsQf4pq87gX1SzM-jluMsE,4070
+cattrs/fns.py,sha256=xQceStzW4qLiMTJgGM-pVUudGwHm0Hin8oCYe1feS5c,633
+cattrs/gen/__init__.py,sha256=yBOs4V1SQ6RAPFSGyIkwi4ZEU7fqA_nQrH6ujgT88eI,38527
+cattrs/gen/__pycache__/__init__.cpython-311.pyc,,
+cattrs/gen/__pycache__/_consts.cpython-311.pyc,,
+cattrs/gen/__pycache__/_generics.cpython-311.pyc,,
+cattrs/gen/__pycache__/_lc.cpython-311.pyc,,
+cattrs/gen/__pycache__/_shared.cpython-311.pyc,,
+cattrs/gen/__pycache__/typeddicts.cpython-311.pyc,,
+cattrs/gen/_consts.py,sha256=ZwT_m2J3S7p-UjltpbA1WtfQZLNj9KhmFYCAv6Zl-g0,511
+cattrs/gen/_generics.py,sha256=_DyXCGql2QIxGhAv3_B1hsi80uPK8PhK2hhZa95YOlo,3011
+cattrs/gen/_lc.py,sha256=ktP5F9oOUo4HpZ4-hlLliLPzr8XjFi31EXMl8YMMs-g,906
+cattrs/gen/_shared.py,sha256=4yX9-TD5yyVzDWlSjkECrQV5B82xHUeBt9n2N5UgOAE,2064
+cattrs/gen/typeddicts.py,sha256=C3Bp8tNM-MI7L7KO0X3sfwSkG5d0ua3j7qDtvcCEBQk,22004
+cattrs/preconf/__init__.py,sha256=dfkUXoU47ZJfmoKX9FsnARKqLlgJeBjMxORMzxrbKbs,604
+cattrs/preconf/__pycache__/__init__.cpython-311.pyc,,
+cattrs/preconf/__pycache__/bson.cpython-311.pyc,,
+cattrs/preconf/__pycache__/cbor2.cpython-311.pyc,,
+cattrs/preconf/__pycache__/json.cpython-311.pyc,,
+cattrs/preconf/__pycache__/msgpack.cpython-311.pyc,,
+cattrs/preconf/__pycache__/msgspec.cpython-311.pyc,,
+cattrs/preconf/__pycache__/orjson.cpython-311.pyc,,
+cattrs/preconf/__pycache__/pyyaml.cpython-311.pyc,,
+cattrs/preconf/__pycache__/tomlkit.cpython-311.pyc,,
+cattrs/preconf/__pycache__/ujson.cpython-311.pyc,,
+cattrs/preconf/bson.py,sha256=uBRpTVfwGZ-qfuDYGwsl8eXokVAmcVBedKQPGUmamhc,3656
+cattrs/preconf/cbor2.py,sha256=ANfQUXgs7pyU5-4_2hYmcqUxzQZhWhFzrk_0y6b1yYw,1635
+cattrs/preconf/json.py,sha256=CSU5RosdYyg6cIOpaohgZVfdMtOtKjZlSg837fW4fTw,2035
+cattrs/preconf/msgpack.py,sha256=cgwX_ARi_swQjG6hwa9j-n7FUynLNWIMVLouz_VoTuw,1753
+cattrs/preconf/msgspec.py,sha256=f8J04RXv8UErKAwwzVs1cMbvoM-9erMmmF49zKBbCDo,6343
+cattrs/preconf/orjson.py,sha256=RZ8DI-4K7Xi0QdpIihT9I3Cm-O8Aq8_MTt2R3a4fgEk,3241
+cattrs/preconf/pyyaml.py,sha256=Ga96zLypn2DglTgbrb9h3jcuH-caur_UQI1ADo-ynUA,2298
+cattrs/preconf/tomlkit.py,sha256=2k-BN0ZW3faWmHcMQ1bCvsKCClhdgSjTe056O1xEc4o,3060
+cattrs/preconf/ujson.py,sha256=JBh5dWluwMwKhAJPINJhpse_aQ1p9hzrGo8BuvmG6S0,1863
+cattrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cattrs/strategies/__init__.py,sha256=nkZWCzSRYcS-75FMfk52mioZSuWykaN8hB39Vig5Xkg,339
+cattrs/strategies/__pycache__/__init__.cpython-311.pyc,,
+cattrs/strategies/__pycache__/_class_methods.cpython-311.pyc,,
+cattrs/strategies/__pycache__/_subclasses.cpython-311.pyc,,
+cattrs/strategies/__pycache__/_unions.cpython-311.pyc,,
+cattrs/strategies/_class_methods.py,sha256=vfiE3wKm04oc-3T9hchsIyhVzpMpJRdgTbujKsWyVpQ,2597
+cattrs/strategies/_subclasses.py,sha256=zzhLl7fSZlmlBuBY-rPX7L1d_C5tiDFDBmUTeRpG2uI,9204
+cattrs/strategies/_unions.py,sha256=l8CjVVFAwftkBa47g3m2KgtQ_b42Wnv-KwYY_LHReCA,9166
+cattrs/v.py,sha256=cTYt0EW8yr-gzKynw4_XjFv3RLpAF8IebvOb612l9QE,4399
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/WHEEL
new file mode 100644
index 0000000..cdd68a4
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: hatchling 1.25.0
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/licenses/LICENSE b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/licenses/LICENSE
new file mode 100644
index 0000000..340022c
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs-24.1.2.dist-info/licenses/LICENSE
@@ -0,0 +1,11 @@
+
+MIT License
+
+Copyright (c) 2016, Tin Tvrtković
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/.DS_Store b/lambdas/aws-dd-forwarder-3.127.0/cattrs/.DS_Store
new file mode 100644
index 0000000..5aad9a2
Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/cattrs/.DS_Store differ
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/__init__.py
new file mode 100644
index 0000000..db49636
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/__init__.py
@@ -0,0 +1,55 @@
+from typing import Final
+
+from .converters import BaseConverter, Converter, GenConverter, UnstructureStrategy
+from .errors import (
+ AttributeValidationNote,
+ BaseValidationError,
+ ClassValidationError,
+ ForbiddenExtraKeysError,
+ IterableValidationError,
+ IterableValidationNote,
+ StructureHandlerNotFoundError,
+)
+from .gen import override
+from .v import transform_error
+
+__all__ = [
+ "structure",
+ "unstructure",
+ "get_structure_hook",
+ "get_unstructure_hook",
+ "register_structure_hook_func",
+ "register_structure_hook",
+ "register_unstructure_hook_func",
+ "register_unstructure_hook",
+ "structure_attrs_fromdict",
+ "structure_attrs_fromtuple",
+ "global_converter",
+ "BaseConverter",
+ "Converter",
+ "AttributeValidationNote",
+ "BaseValidationError",
+ "ClassValidationError",
+ "ForbiddenExtraKeysError",
+ "GenConverter",
+ "IterableValidationError",
+ "IterableValidationNote",
+ "override",
+ "StructureHandlerNotFoundError",
+ "transform_error",
+ "UnstructureStrategy",
+]
+
+#: The global converter. Prefer creating your own if customizations are required.
+global_converter: Final = Converter()
+
+unstructure = global_converter.unstructure
+structure = global_converter.structure
+structure_attrs_fromtuple = global_converter.structure_attrs_fromtuple
+structure_attrs_fromdict = global_converter.structure_attrs_fromdict
+register_structure_hook = global_converter.register_structure_hook
+register_structure_hook_func = global_converter.register_structure_hook_func
+register_unstructure_hook = global_converter.register_unstructure_hook
+register_unstructure_hook_func = global_converter.register_unstructure_hook_func
+get_structure_hook: Final = global_converter.get_structure_hook
+get_unstructure_hook: Final = global_converter.get_unstructure_hook
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/_compat.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/_compat.py
new file mode 100644
index 0000000..027ef47
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/_compat.py
@@ -0,0 +1,578 @@
+import sys
+from collections import deque
+from collections.abc import Mapping as AbcMapping
+from collections.abc import MutableMapping as AbcMutableMapping
+from collections.abc import MutableSet as AbcMutableSet
+from collections.abc import Set as AbcSet
+from dataclasses import MISSING, Field, is_dataclass
+from dataclasses import fields as dataclass_fields
+from functools import partial
+from inspect import signature as _signature
+from typing import AbstractSet as TypingAbstractSet
+from typing import (
+ Any,
+ Deque,
+ Dict,
+ Final,
+ FrozenSet,
+ List,
+ Literal,
+ NewType,
+ Optional,
+ Protocol,
+ Tuple,
+ Type,
+ Union,
+ get_args,
+ get_origin,
+ get_type_hints,
+)
+from typing import Mapping as TypingMapping
+from typing import MutableMapping as TypingMutableMapping
+from typing import MutableSequence as TypingMutableSequence
+from typing import MutableSet as TypingMutableSet
+from typing import Sequence as TypingSequence
+from typing import Set as TypingSet
+
+from attrs import NOTHING, Attribute, Factory, resolve_types
+from attrs import fields as attrs_fields
+from attrs import fields_dict as attrs_fields_dict
+
+__all__ = [
+ "ANIES",
+ "adapted_fields",
+ "fields_dict",
+ "ExceptionGroup",
+ "ExtensionsTypedDict",
+ "get_type_alias_base",
+ "has",
+ "is_type_alias",
+ "is_typeddict",
+ "TypeAlias",
+ "TypedDict",
+]
+
+try:
+ from typing_extensions import TypedDict as ExtensionsTypedDict
+except ImportError: # pragma: no cover
+ ExtensionsTypedDict = None
+
+if sys.version_info >= (3, 11):
+ from builtins import ExceptionGroup
+else:
+ from exceptiongroup import ExceptionGroup
+
+try:
+ from typing_extensions import is_typeddict as _is_typeddict
+except ImportError: # pragma: no cover
+ assert sys.version_info >= (3, 10)
+ from typing import is_typeddict as _is_typeddict
+
+try:
+ from typing_extensions import TypeAlias
+except ImportError: # pragma: no cover
+ assert sys.version_info >= (3, 11)
+ from typing import TypeAlias
+
+LITERALS = {Literal}
+try:
+ from typing_extensions import Literal as teLiteral
+
+ LITERALS.add(teLiteral)
+except ImportError: # pragma: no cover
+ pass
+
+# On some Python versions, `typing_extensions.Any` is different than
+# `typing.Any`.
+try:
+ from typing_extensions import Any as teAny
+
+ ANIES = frozenset([Any, teAny])
+except ImportError: # pragma: no cover
+ ANIES = frozenset([Any])
+
+NoneType = type(None)
+
+
+def is_optional(typ: Type) -> bool:
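+    """Detect `Optional[T]`: a union of exactly two members, one of them `None`."""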
+ return is_union_type(typ) and NoneType in typ.__args__ and len(typ.__args__) == 2
+
+
+def is_typeddict(cls):
+ """Thin wrapper around typing(_extensions).is_typeddict"""
+ return _is_typeddict(getattr(cls, "__origin__", cls))
+
+
+def is_type_alias(type: Any) -> bool:
+ """Is this a PEP 695 type alias?"""
+ return False
+
+
+def get_type_alias_base(type: Any) -> Any:
+ """
+ What is this a type alias of?
+
+ Works only on 3.12+.
+ """
+ return type.__value__
+
+
+def has(cls):
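+    """Detect both attrs classes and dataclasses."""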
+ return hasattr(cls, "__attrs_attrs__") or hasattr(cls, "__dataclass_fields__")
+
+
+def has_with_generic(cls):
+ """Test whether the class if a normal or generic attrs or dataclass."""
+ return has(cls) or has(get_origin(cls))
+
+
+def fields(type):
+ try:
+ return type.__attrs_attrs__
+ except AttributeError:
+ return dataclass_fields(type)
+
+
+def fields_dict(type) -> Dict[str, Union[Attribute, Field]]:
+ """Return the fields_dict for attrs and dataclasses."""
+ if is_dataclass(type):
+ return {f.name: f for f in dataclass_fields(type)}
+ return attrs_fields_dict(type)
+
+
+def adapted_fields(cl) -> List[Attribute]:
+ """Return the attrs format of `fields()` for attrs and dataclasses."""
+ if is_dataclass(cl):
+ attrs = dataclass_fields(cl)
+ if any(isinstance(a.type, str) for a in attrs):
+ # Do this conditionally in case `get_type_hints` fails, so
+ # users can resolve on their own first.
+ type_hints = get_type_hints(cl)
+ else:
+ type_hints = {}
+ return [
+ Attribute(
+ attr.name,
+ (
+ attr.default
+ if attr.default is not MISSING
+ else (
+ Factory(attr.default_factory)
+ if attr.default_factory is not MISSING
+ else NOTHING
+ )
+ ),
+ None,
+ True,
+ None,
+ True,
+ attr.init,
+ True,
+ type=type_hints.get(attr.name, attr.type),
+ alias=attr.name,
+ )
+ for attr in attrs
+ ]
+ attribs = attrs_fields(cl)
+ if any(isinstance(a.type, str) for a in attribs):
+ # PEP 563 annotations - need to be resolved.
+ resolve_types(cl)
+ attribs = attrs_fields(cl)
+ return attribs
+
+
+def is_subclass(obj: type, bases) -> bool:
+ """A safe version of issubclass (won't raise)."""
+ try:
+ return issubclass(obj, bases)
+ except TypeError:
+ return False
+
+
+def is_hetero_tuple(type: Any) -> bool:
+ origin = getattr(type, "__origin__", None)
+ return origin is tuple and ... not in type.__args__
+
+
+def is_protocol(type: Any) -> bool:
+ return is_subclass(type, Protocol) and getattr(type, "_is_protocol", False)
+
+
+def is_bare_final(type) -> bool:
+ return type is Final
+
+
+def get_final_base(type) -> Optional[type]:
+ """Return the base of the Final annotation, if it is Final."""
+ if type is Final:
+ return Any
+ if type.__class__ is _GenericAlias and type.__origin__ is Final:
+ return type.__args__[0]
+ return None
+
+
+OriginAbstractSet = AbcSet
+OriginMutableSet = AbcMutableSet
+
+signature = _signature
+
+if sys.version_info >= (3, 10):
+ signature = partial(_signature, eval_str=True)
+
+if sys.version_info >= (3, 9):
+ from collections import Counter
+ from collections.abc import MutableSequence as AbcMutableSequence
+ from collections.abc import MutableSet as AbcMutableSet
+ from collections.abc import Sequence as AbcSequence
+ from collections.abc import Set as AbcSet
+ from types import GenericAlias
+ from typing import (
+ Annotated,
+ Generic,
+ TypedDict,
+ Union,
+ _AnnotatedAlias,
+ _GenericAlias,
+ _SpecialGenericAlias,
+ _UnionGenericAlias,
+ )
+ from typing import Counter as TypingCounter
+
+ try:
+ # Not present on 3.9.0, so we try carefully.
+ from typing import _LiteralGenericAlias
+
+ def is_literal(type) -> bool:
+ return type in LITERALS or (
+ isinstance(
+ type, (_GenericAlias, _LiteralGenericAlias, _SpecialGenericAlias)
+ )
+ and type.__origin__ in LITERALS
+ )
+
+ except ImportError: # pragma: no cover
+
+ def is_literal(_) -> bool:
+ return False
+
+ Set = AbcSet
+ AbstractSet = AbcSet
+ MutableSet = AbcMutableSet
+ Sequence = AbcSequence
+ MutableSequence = AbcMutableSequence
+ MutableMapping = AbcMutableMapping
+ Mapping = AbcMapping
+ FrozenSetSubscriptable = frozenset
+ TupleSubscriptable = tuple
+
+ def is_annotated(type) -> bool:
+ return getattr(type, "__class__", None) is _AnnotatedAlias
+
+ def is_tuple(type):
+ return (
+ type in (Tuple, tuple)
+ or (type.__class__ is _GenericAlias and is_subclass(type.__origin__, Tuple))
+ or (getattr(type, "__origin__", None) is tuple)
+ )
+
+ if sys.version_info >= (3, 12):
+ from typing import TypeAliasType
+
+ def is_type_alias(type: Any) -> bool:
+ """Is this a PEP 695 type alias?"""
+ return isinstance(type, TypeAliasType)
+
+ if sys.version_info >= (3, 10):
+
+ def is_union_type(obj):
+ from types import UnionType
+
+ return (
+ obj is Union
+ or (isinstance(obj, _UnionGenericAlias) and obj.__origin__ is Union)
+ or isinstance(obj, UnionType)
+ )
+
+ def get_newtype_base(typ: Any) -> Optional[type]:
+ if typ is NewType or isinstance(typ, NewType):
+ return typ.__supertype__
+ return None
+
+ if sys.version_info >= (3, 11):
+ from typing import NotRequired, Required
+ else:
+ from typing_extensions import NotRequired, Required
+
+ else:
+ from typing_extensions import NotRequired, Required
+
+ def is_union_type(obj):
+ return (
+ obj is Union
+ or isinstance(obj, _UnionGenericAlias)
+ and obj.__origin__ is Union
+ )
+
+ def get_newtype_base(typ: Any) -> Optional[type]:
+ supertype = getattr(typ, "__supertype__", None)
+ if (
+ supertype is not None
+                and getattr(typ, "__qualname__", "") == "NewType.<locals>.new_type"
+ and typ.__module__ in ("typing", "typing_extensions")
+ ):
+ return supertype
+ return None
+
+ def get_notrequired_base(type) -> "Union[Any, Literal[NOTHING]]":
+ if is_annotated(type):
+ # Handle `Annotated[NotRequired[int]]`
+ type = get_args(type)[0]
+ if get_origin(type) in (NotRequired, Required):
+ return get_args(type)[0]
+ return NOTHING
+
+ def is_sequence(type: Any) -> bool:
+ """A predicate function for sequences.
+
+        Matches lists, sequences, mutable sequences, deques and homogeneous
+ tuples.
+ """
+ origin = getattr(type, "__origin__", None)
+ return (
+ type
+ in (
+ List,
+ list,
+ TypingSequence,
+ TypingMutableSequence,
+ AbcMutableSequence,
+ tuple,
+ Tuple,
+ deque,
+ Deque,
+ )
+ or (
+ type.__class__ is _GenericAlias
+ and (
+ (origin is not tuple)
+ and is_subclass(origin, TypingSequence)
+ or origin is tuple
+ and type.__args__[1] is ...
+ )
+ )
+ or (origin in (list, deque, AbcMutableSequence, AbcSequence))
+ or (origin is tuple and type.__args__[1] is ...)
+ )
+
+ def is_deque(type):
+ return (
+ type in (deque, Deque)
+ or (type.__class__ is _GenericAlias and is_subclass(type.__origin__, deque))
+ or (getattr(type, "__origin__", None) is deque)
+ )
+
+ def is_mutable_set(type: Any) -> bool:
+ """A predicate function for (mutable) sets.
+
+ Matches built-in sets and sets from the typing module.
+ """
+ return (
+ type in (TypingSet, TypingMutableSet, set)
+ or (
+ type.__class__ is _GenericAlias
+ and is_subclass(type.__origin__, TypingMutableSet)
+ )
+ or (getattr(type, "__origin__", None) in (set, AbcMutableSet, AbcSet))
+ )
+
+ def is_frozenset(type: Any) -> bool:
+ """A predicate function for frozensets.
+
+ Matches built-in frozensets and frozensets from the typing module.
+ """
+ return (
+ type in (FrozenSet, frozenset)
+ or (
+ type.__class__ is _GenericAlias
+ and is_subclass(type.__origin__, FrozenSet)
+ )
+ or (getattr(type, "__origin__", None) is frozenset)
+ )
+
+ def is_bare(type):
+ return isinstance(type, _SpecialGenericAlias) or (
+ not hasattr(type, "__origin__") and not hasattr(type, "__args__")
+ )
+
+ def is_mapping(type: Any) -> bool:
+ """A predicate function for mappings."""
+ return (
+ type in (dict, Dict, TypingMapping, TypingMutableMapping, AbcMutableMapping)
+ or (
+ type.__class__ is _GenericAlias
+ and is_subclass(type.__origin__, TypingMapping)
+ )
+ or is_subclass(
+ getattr(type, "__origin__", type), (dict, AbcMutableMapping, AbcMapping)
+ )
+ )
+
+ def is_counter(type):
+ return (
+ type in (Counter, TypingCounter)
+ or getattr(type, "__origin__", None) is Counter
+ )
+
+ def is_generic(type) -> bool:
+ """Whether `type` is a generic type."""
+ # Inheriting from protocol will inject `Generic` into the MRO
+ # without `__orig_bases__`.
+ return isinstance(type, (_GenericAlias, GenericAlias)) or (
+ is_subclass(type, Generic) and hasattr(type, "__orig_bases__")
+ )
+
+ def copy_with(type, args):
+ """Replace a generic type's arguments."""
+ if is_annotated(type):
+ # typing.Annotated requires a special case.
+ return Annotated[args]
+ if isinstance(args, tuple) and len(args) == 1:
+ # Some annotations can't handle 1-tuples.
+ args = args[0]
+ return type.__origin__[args]
+
+ def get_full_type_hints(obj, globalns=None, localns=None):
+ return get_type_hints(obj, globalns, localns, include_extras=True)
+
+else:
+ # 3.8
+ Set = TypingSet
+ AbstractSet = TypingAbstractSet
+ MutableSet = TypingMutableSet
+
+ Sequence = TypingSequence
+ MutableSequence = TypingMutableSequence
+ MutableMapping = TypingMutableMapping
+ Mapping = TypingMapping
+ FrozenSetSubscriptable = FrozenSet
+ TupleSubscriptable = Tuple
+
+ from collections import Counter as ColCounter
+ from typing import Counter, Generic, TypedDict, Union, _GenericAlias
+
+ from typing_extensions import Annotated, NotRequired, Required
+ from typing_extensions import get_origin as te_get_origin
+
+ def is_annotated(type) -> bool:
+ return te_get_origin(type) is Annotated
+
+ def is_tuple(type):
+ return type in (Tuple, tuple) or (
+ type.__class__ is _GenericAlias and is_subclass(type.__origin__, Tuple)
+ )
+
+ def is_union_type(obj):
+ return (
+ obj is Union or isinstance(obj, _GenericAlias) and obj.__origin__ is Union
+ )
+
+ def get_newtype_base(typ: Any) -> Optional[type]:
+ supertype = getattr(typ, "__supertype__", None)
+ if (
+ supertype is not None
+            and getattr(typ, "__qualname__", "") == "NewType.<locals>.new_type"
+ and typ.__module__ in ("typing", "typing_extensions")
+ ):
+ return supertype
+ return None
+
+ def is_sequence(type: Any) -> bool:
+ return type in (List, list, Tuple, tuple) or (
+ type.__class__ is _GenericAlias
+ and (
+ type.__origin__ not in (Union, Tuple, tuple)
+ and is_subclass(type.__origin__, TypingSequence)
+ )
+ or (type.__origin__ in (Tuple, tuple) and type.__args__[1] is ...)
+ )
+
+ def is_deque(type: Any) -> bool:
+ return (
+ type in (deque, Deque)
+ or (type.__class__ is _GenericAlias and is_subclass(type.__origin__, deque))
+ or type.__origin__ is deque
+ )
+
+ def is_mutable_set(type) -> bool:
+ return type in (set, TypingAbstractSet) or (
+ type.__class__ is _GenericAlias
+ and is_subclass(type.__origin__, (MutableSet, TypingAbstractSet))
+ )
+
+ def is_frozenset(type):
+ return type is frozenset or (
+ type.__class__ is _GenericAlias and is_subclass(type.__origin__, FrozenSet)
+ )
+
+ def is_mapping(type: Any) -> bool:
+ """A predicate function for mappings."""
+ return (
+ type in (TypingMapping, dict)
+ or (
+ type.__class__ is _GenericAlias
+ and is_subclass(type.__origin__, TypingMapping)
+ )
+ or is_subclass(
+ getattr(type, "__origin__", type), (dict, AbcMutableMapping, AbcMapping)
+ )
+ )
+
+ bare_generic_args = {
+ List.__args__,
+ TypingSequence.__args__,
+ TypingMapping.__args__,
+ Dict.__args__,
+ TypingMutableSequence.__args__,
+ Tuple.__args__,
+        None,  # non-parametrized containers do not have the `__args__` attribute in py3.7-8
+ }
+
+ def is_bare(type):
+ return getattr(type, "__args__", None) in bare_generic_args
+
+ def is_counter(type):
+ return (
+ type in (Counter, ColCounter)
+ or getattr(type, "__origin__", None) is ColCounter
+ )
+
+ def is_literal(type) -> bool:
+ return type in LITERALS or (
+ isinstance(type, _GenericAlias) and type.__origin__ in LITERALS
+ )
+
+ def is_generic(obj):
+ return isinstance(obj, _GenericAlias) or (
+ is_subclass(obj, Generic) and hasattr(obj, "__orig_bases__")
+ )
+
+ def copy_with(type, args):
+ """Replace a generic type's arguments."""
+ return type.copy_with(args)
+
+ def get_notrequired_base(type) -> "Union[Any, Literal[NOTHING]]":
+ if is_annotated(type):
+ # Handle `Annotated[NotRequired[int]]`
+ type = get_origin(type)
+
+ if get_origin(type) in (NotRequired, Required):
+ return get_args(type)[0]
+ return NOTHING
+
+ def get_full_type_hints(obj, globalns=None, localns=None):
+ return get_type_hints(obj, globalns, localns)
+
+
+def is_generic_attrs(type) -> bool:
+ """Return True for both specialized (A[int]) and unspecialized (A) generics."""
+ return is_generic(type) and has(type.__origin__)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/_generics.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/_generics.py
new file mode 100644
index 0000000..c473f43
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/_generics.py
@@ -0,0 +1,24 @@
+from typing import Any, Mapping
+
+from ._compat import copy_with, get_args, is_annotated, is_generic
+
+
+def deep_copy_with(t, mapping: Mapping[str, Any]):
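+    """Recursively replace the type arguments of a generic type, using `mapping` keyed by type-variable name."""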
+ args = get_args(t)
+ rest = ()
+ if is_annotated(t) and args:
+ # If we're dealing with `Annotated`, we only map the first type parameter
+ rest = tuple(args[1:])
+ args = (args[0],)
+ new_args = (
+ tuple(
+ (
+ mapping[a.__name__]
+ if hasattr(a, "__name__") and a.__name__ in mapping
+ else (deep_copy_with(a, mapping) if is_generic(a) else a)
+ )
+ for a in args
+ )
+ + rest
+ )
+ return copy_with(t, new_args) if new_args != args else t
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/cols.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/cols.py
new file mode 100644
index 0000000..8ff5c0f
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/cols.py
@@ -0,0 +1,289 @@
+"""Utility functions for collections."""
+
+from __future__ import annotations
+
+from sys import version_info
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Iterable,
+ Literal,
+ NamedTuple,
+ Tuple,
+ TypeVar,
+ get_type_hints,
+)
+
+from attrs import NOTHING, Attribute
+
+from ._compat import ANIES, is_bare, is_frozenset, is_mapping, is_sequence, is_subclass
+from ._compat import is_mutable_set as is_set
+from .dispatch import StructureHook, UnstructureHook
+from .errors import IterableValidationError, IterableValidationNote
+from .fns import identity
+from .gen import (
+ AttributeOverride,
+ already_generating,
+ make_dict_structure_fn_from_attrs,
+ make_dict_unstructure_fn_from_attrs,
+ make_hetero_tuple_unstructure_fn,
+ mapping_structure_factory,
+)
+from .gen import make_iterable_unstructure_fn as iterable_unstructure_factory
+
+if TYPE_CHECKING:
+ from .converters import BaseConverter
+
+__all__ = [
+ "is_any_set",
+ "is_frozenset",
+ "is_namedtuple",
+ "is_mapping",
+ "is_set",
+ "is_sequence",
+ "iterable_unstructure_factory",
+ "list_structure_factory",
+ "namedtuple_structure_factory",
+ "namedtuple_unstructure_factory",
+ "namedtuple_dict_structure_factory",
+ "namedtuple_dict_unstructure_factory",
+ "mapping_structure_factory",
+]
+
+
+def is_any_set(type) -> bool:
+ """A predicate function for both mutable and frozensets."""
+ return is_set(type) or is_frozenset(type)
+
+
+if version_info[:2] >= (3, 9):
+
+ def is_namedtuple(type: Any) -> bool:
+ """A predicate function for named tuples."""
+
+ if is_subclass(type, tuple):
+ for cl in type.mro():
+ orig_bases = cl.__dict__.get("__orig_bases__", ())
+ if NamedTuple in orig_bases:
+ return True
+ return False
+
+else:
+
+ def is_namedtuple(type: Any) -> bool:
+ """A predicate function for named tuples."""
+ # This is tricky. It may not be possible for this function to be 100%
+ # accurate, since it doesn't seem like we can distinguish between tuple
+ # subclasses and named tuples reliably.
+
+ if is_subclass(type, tuple):
+ for cl in type.mro():
+ if cl is tuple:
+ # No point going further.
+ break
+ if "_fields" in cl.__dict__:
+ return True
+ return False
+
+
+def _is_passthrough(type: type[tuple], converter: BaseConverter) -> bool:
+ """If all fields would be passed through, this class should not be processed
+ either.
+ """
+ return all(
+ converter.get_unstructure_hook(t) == identity
+ for t in type.__annotations__.values()
+ )
+
+
+T = TypeVar("T")
+
+
+def list_structure_factory(type: type, converter: BaseConverter) -> StructureHook:
+ """A hook factory for structuring lists.
+
+ Converts any given iterable into a list.
+ """
+
+ if is_bare(type) or type.__args__[0] in ANIES:
+
+ def structure_list(obj: Iterable[T], _: type = type) -> list[T]:
+ return list(obj)
+
+ return structure_list
+
+ elem_type = type.__args__[0]
+
+ try:
+ handler = converter.get_structure_hook(elem_type)
+ except RecursionError:
+ # Break the cycle by using late binding.
+ handler = converter.structure
+
+ if converter.detailed_validation:
+
+ def structure_list(
+ obj: Iterable[T], _: type = type, _handler=handler, _elem_type=elem_type
+ ) -> list[T]:
+ errors = []
+ res = []
+ ix = 0 # Avoid `enumerate` for performance.
+ for e in obj:
+ try:
+ res.append(handler(e, _elem_type))
+ except Exception as e:
+ msg = IterableValidationNote(
+ f"Structuring {type} @ index {ix}", ix, elem_type
+ )
+ e.__notes__ = [*getattr(e, "__notes__", []), msg]
+ errors.append(e)
+ finally:
+ ix += 1
+ if errors:
+ raise IterableValidationError(
+ f"While structuring {type!r}", errors, type
+ )
+
+ return res
+
+ else:
+
+ def structure_list(
+ obj: Iterable[T], _: type = type, _handler=handler, _elem_type=elem_type
+ ) -> list[T]:
+ return [_handler(e, _elem_type) for e in obj]
+
+ return structure_list
+
+
+def namedtuple_unstructure_factory(
+ cl: type[tuple], converter: BaseConverter, unstructure_to: Any = None
+) -> UnstructureHook:
+ """A hook factory for unstructuring namedtuples.
+
+ :param unstructure_to: Force unstructuring to this type, if provided.
+ """
+
+ if unstructure_to is None and _is_passthrough(cl, converter):
+ return identity
+
+ return make_hetero_tuple_unstructure_fn(
+ cl,
+ converter,
+ unstructure_to=tuple if unstructure_to is None else unstructure_to,
+ type_args=tuple(cl.__annotations__.values()),
+ )
+
+
+def namedtuple_structure_factory(
+ cl: type[tuple], converter: BaseConverter
+) -> StructureHook:
+ """A hook factory for structuring namedtuples from iterables."""
+    # We delegate to the existing infrastructure for heterogeneous tuples.
+ hetero_tuple_type = Tuple[tuple(cl.__annotations__.values())]
+ base_hook = converter.get_structure_hook(hetero_tuple_type)
+ return lambda v, _: cl(*base_hook(v, hetero_tuple_type))
+
+
+def _namedtuple_to_attrs(cl: type[tuple]) -> list[Attribute]:
+ """Generate pseudo attributes for a namedtuple."""
+ return [
+ Attribute(
+ name,
+ cl._field_defaults.get(name, NOTHING),
+ None,
+ False,
+ False,
+ False,
+ True,
+ False,
+ type=a,
+ alias=name,
+ )
+ for name, a in get_type_hints(cl).items()
+ ]
+
+
+def namedtuple_dict_structure_factory(
+ cl: type[tuple],
+ converter: BaseConverter,
+ detailed_validation: bool | Literal["from_converter"] = "from_converter",
+ forbid_extra_keys: bool = False,
+ use_linecache: bool = True,
+ /,
+ **kwargs: AttributeOverride,
+) -> StructureHook:
+ """A hook factory for hooks structuring namedtuples from dictionaries.
+
+ :param forbid_extra_keys: Whether the hook should raise a `ForbiddenExtraKeysError`
+ if unknown keys are encountered.
+ :param use_linecache: Whether to store the source code in the Python linecache.
+
+ .. versionadded:: 24.1.0
+ """
+ try:
+ working_set = already_generating.working_set
+ except AttributeError:
+ working_set = set()
+ already_generating.working_set = working_set
+ else:
+ if cl in working_set:
+ raise RecursionError()
+
+ working_set.add(cl)
+
+ try:
+ return make_dict_structure_fn_from_attrs(
+ _namedtuple_to_attrs(cl),
+ cl,
+ converter,
+ _cattrs_forbid_extra_keys=forbid_extra_keys,
+ _cattrs_use_detailed_validation=detailed_validation,
+ _cattrs_use_linecache=use_linecache,
+ **kwargs,
+ )
+ finally:
+ working_set.remove(cl)
+ if not working_set:
+ del already_generating.working_set
+
+
+def namedtuple_dict_unstructure_factory(
+ cl: type[tuple],
+ converter: BaseConverter,
+ omit_if_default: bool = False,
+ use_linecache: bool = True,
+ /,
+ **kwargs: AttributeOverride,
+) -> UnstructureHook:
+ """A hook factory for hooks unstructuring namedtuples to dictionaries.
+
+ :param omit_if_default: When true, attributes equal to their default values
+ will be omitted in the result dictionary.
+ :param use_linecache: Whether to store the source code in the Python linecache.
+
+ .. versionadded:: 24.1.0
+ """
+ try:
+ working_set = already_generating.working_set
+ except AttributeError:
+ working_set = set()
+ already_generating.working_set = working_set
+ if cl in working_set:
+ raise RecursionError()
+
+ working_set.add(cl)
+
+ try:
+ return make_dict_unstructure_fn_from_attrs(
+ _namedtuple_to_attrs(cl),
+ cl,
+ converter,
+ _cattrs_omit_if_default=omit_if_default,
+ _cattrs_use_linecache=use_linecache,
+ **kwargs,
+ )
+ finally:
+ working_set.remove(cl)
+ if not working_set:
+ del already_generating.working_set
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/converters.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/converters.py
new file mode 100644
index 0000000..1490ec2
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/converters.py
@@ -0,0 +1,1419 @@
+from __future__ import annotations
+
+from collections import Counter, deque
+from collections.abc import Mapping as AbcMapping
+from collections.abc import MutableMapping as AbcMutableMapping
+from collections.abc import MutableSet as AbcMutableSet
+from dataclasses import Field
+from enum import Enum
+from inspect import Signature
+from inspect import signature as inspect_signature
+from pathlib import Path
+from typing import Any, Callable, Iterable, Optional, Tuple, TypeVar, overload
+
+from attrs import Attribute, resolve_types
+from attrs import has as attrs_has
+
+from ._compat import (
+ ANIES,
+ FrozenSetSubscriptable,
+ Mapping,
+ MutableMapping,
+ MutableSequence,
+ NoneType,
+ OriginAbstractSet,
+ OriginMutableSet,
+ Sequence,
+ Set,
+ TypeAlias,
+ fields,
+ get_final_base,
+ get_newtype_base,
+ get_origin,
+ get_type_alias_base,
+ has,
+ has_with_generic,
+ is_annotated,
+ is_bare,
+ is_counter,
+ is_deque,
+ is_frozenset,
+ is_generic,
+ is_generic_attrs,
+ is_hetero_tuple,
+ is_literal,
+ is_mapping,
+ is_mutable_set,
+ is_optional,
+ is_protocol,
+ is_sequence,
+ is_tuple,
+ is_type_alias,
+ is_typeddict,
+ is_union_type,
+ signature,
+)
+from .cols import (
+ is_namedtuple,
+ iterable_unstructure_factory,
+ list_structure_factory,
+ namedtuple_structure_factory,
+ namedtuple_unstructure_factory,
+)
+from .disambiguators import create_default_dis_func, is_supported_union
+from .dispatch import (
+ HookFactory,
+ MultiStrategyDispatch,
+ StructuredValue,
+ StructureHook,
+ TargetType,
+ UnstructuredValue,
+ UnstructureHook,
+)
+from .errors import (
+ IterableValidationError,
+ IterableValidationNote,
+ StructureHandlerNotFoundError,
+)
+from .fns import Predicate, identity, raise_error
+from .gen import (
+ AttributeOverride,
+ DictStructureFn,
+ HeteroTupleUnstructureFn,
+ IterableUnstructureFn,
+ MappingStructureFn,
+ MappingUnstructureFn,
+ make_dict_structure_fn,
+ make_dict_unstructure_fn,
+ make_hetero_tuple_unstructure_fn,
+ make_mapping_structure_fn,
+ make_mapping_unstructure_fn,
+)
+from .gen.typeddicts import make_dict_structure_fn as make_typeddict_dict_struct_fn
+from .gen.typeddicts import make_dict_unstructure_fn as make_typeddict_dict_unstruct_fn
+
+__all__ = ["UnstructureStrategy", "BaseConverter", "Converter", "GenConverter"]
+
+T = TypeVar("T")
+V = TypeVar("V")
+
+UnstructureHookFactory = TypeVar(
+ "UnstructureHookFactory", bound=HookFactory[UnstructureHook]
+)
+
+# The Extended factory also takes a converter.
+ExtendedUnstructureHookFactory: TypeAlias = Callable[[TargetType, T], UnstructureHook]
+
+# This typevar is for the BaseConverter.
+AnyUnstructureHookFactoryBase = TypeVar(
+ "AnyUnstructureHookFactoryBase",
+ bound="HookFactory[UnstructureHook] | ExtendedUnstructureHookFactory[BaseConverter]",
+)
+
+# This typevar is for the Converter.
+AnyUnstructureHookFactory = TypeVar(
+ "AnyUnstructureHookFactory",
+ bound="HookFactory[UnstructureHook] | ExtendedUnstructureHookFactory[Converter]",
+)
+
+StructureHookFactory = TypeVar("StructureHookFactory", bound=HookFactory[StructureHook])
+
+# The Extended factory also takes a converter.
+ExtendedStructureHookFactory: TypeAlias = Callable[[TargetType, T], StructureHook]
+
+# This typevar is for the BaseConverter.
+AnyStructureHookFactoryBase = TypeVar(
+ "AnyStructureHookFactoryBase",
+ bound="HookFactory[StructureHook] | ExtendedStructureHookFactory[BaseConverter]",
+)
+
+# This typevar is for the Converter.
+AnyStructureHookFactory = TypeVar(
+ "AnyStructureHookFactory",
+ bound="HookFactory[StructureHook] | ExtendedStructureHookFactory[Converter]",
+)
+
+UnstructureHookT = TypeVar("UnstructureHookT", bound=UnstructureHook)
+StructureHookT = TypeVar("StructureHookT", bound=StructureHook)
+
+
+class UnstructureStrategy(Enum):
+ """`attrs` classes unstructuring strategies."""
+
+ AS_DICT = "asdict"
+ AS_TUPLE = "astuple"
+
+
+def is_literal_containing_enums(typ: type) -> bool:
+ return is_literal(typ) and any(isinstance(val, Enum) for val in typ.__args__)
+
+
+def _is_extended_factory(factory: Callable) -> bool:
+ """Does this factory also accept a converter arg?"""
+ # We use the original `inspect.signature` to not evaluate string
+ # annotations.
+ sig = inspect_signature(factory)
+ return (
+ len(sig.parameters) >= 2
+ and (list(sig.parameters.values())[1]).default is Signature.empty
+ )
+
+
+class BaseConverter:
+ """Converts between structured and unstructured data."""
+
+ __slots__ = (
+ "_unstructure_func",
+ "_unstructure_attrs",
+ "_structure_attrs",
+ "_dict_factory",
+ "_union_struct_registry",
+ "_structure_func",
+ "_prefer_attrib_converters",
+ "detailed_validation",
+ "_struct_copy_skip",
+ "_unstruct_copy_skip",
+ )
+
+ def __init__(
+ self,
+ dict_factory: Callable[[], Any] = dict,
+ unstruct_strat: UnstructureStrategy = UnstructureStrategy.AS_DICT,
+ prefer_attrib_converters: bool = False,
+ detailed_validation: bool = True,
+ unstructure_fallback_factory: HookFactory[UnstructureHook] = lambda _: identity,
+ structure_fallback_factory: HookFactory[StructureHook] = lambda _: raise_error,
+ ) -> None:
+ """
+ :param detailed_validation: Whether to use a slightly slower mode for detailed
+ validation errors.
+ :param unstructure_fallback_factory: A hook factory to be called when no
+ registered unstructuring hooks match.
+ :param structure_fallback_factory: A hook factory to be called when no
+ registered structuring hooks match.
+
+ .. versionadded:: 23.2.0 *unstructure_fallback_factory*
+ .. versionadded:: 23.2.0 *structure_fallback_factory*
+ """
+ unstruct_strat = UnstructureStrategy(unstruct_strat)
+ self._prefer_attrib_converters = prefer_attrib_converters
+
+ self.detailed_validation = detailed_validation
+ self._union_struct_registry: dict[Any, Callable[[Any, type[T]], T]] = {}
+
+ # Create a per-instance cache.
+ if unstruct_strat is UnstructureStrategy.AS_DICT:
+ self._unstructure_attrs = self.unstructure_attrs_asdict
+ self._structure_attrs = self.structure_attrs_fromdict
+ else:
+ self._unstructure_attrs = self.unstructure_attrs_astuple
+ self._structure_attrs = self.structure_attrs_fromtuple
+
+ self._unstructure_func = MultiStrategyDispatch(
+ unstructure_fallback_factory, self
+ )
+ self._unstructure_func.register_cls_list(
+ [(bytes, identity), (str, identity), (Path, str)]
+ )
+ self._unstructure_func.register_func_list(
+ [
+ (
+ is_protocol,
+ lambda o: self.unstructure(o, unstructure_as=o.__class__),
+ ),
+ (
+ lambda t: get_final_base(t) is not None,
+ lambda t: self.get_unstructure_hook(get_final_base(t)),
+ True,
+ ),
+ (
+ is_type_alias,
+ lambda t: self.get_unstructure_hook(get_type_alias_base(t)),
+ True,
+ ),
+ (is_mapping, self._unstructure_mapping),
+ (is_sequence, self._unstructure_seq),
+ (is_mutable_set, self._unstructure_seq),
+ (is_frozenset, self._unstructure_seq),
+ (lambda t: issubclass(t, Enum), self._unstructure_enum),
+ (has, self._unstructure_attrs),
+ (is_union_type, self._unstructure_union),
+ (lambda t: t in ANIES, self.unstructure),
+ ]
+ )
+
+ # Per-instance register of to-attrs converters.
+ # Singledispatch dispatches based on the first argument, so we
+ # store the function and switch the arguments in self.loads.
+ self._structure_func = MultiStrategyDispatch(structure_fallback_factory, self)
+ self._structure_func.register_func_list(
+ [
+ (
+ lambda cl: cl in ANIES or cl is Optional or cl is None,
+ lambda v, _: v,
+ ),
+ (is_generic_attrs, self._gen_structure_generic, True),
+ (lambda t: get_newtype_base(t) is not None, self._structure_newtype),
+ (is_type_alias, self._find_type_alias_structure_hook, True),
+ (
+ lambda t: get_final_base(t) is not None,
+ self._structure_final_factory,
+ True,
+ ),
+ (is_literal, self._structure_simple_literal),
+ (is_literal_containing_enums, self._structure_enum_literal),
+ (is_sequence, list_structure_factory, "extended"),
+ (is_deque, self._structure_deque),
+ (is_mutable_set, self._structure_set),
+ (is_frozenset, self._structure_frozenset),
+ (is_tuple, self._structure_tuple),
+ (is_namedtuple, namedtuple_structure_factory, "extended"),
+ (is_mapping, self._structure_dict),
+ (is_supported_union, self._gen_attrs_union_structure, True),
+ (is_optional, self._structure_optional),
+ (
+ lambda t: is_union_type(t) and t in self._union_struct_registry,
+ self._union_struct_registry.__getitem__,
+ True,
+ ),
+ (has, self._structure_attrs),
+ ]
+ )
+ # Strings are sequences.
+ self._structure_func.register_cls_list(
+ [
+ (str, self._structure_call),
+ (bytes, self._structure_call),
+ (int, self._structure_call),
+ (float, self._structure_call),
+ (Enum, self._structure_call),
+ (Path, self._structure_call),
+ ]
+ )
+
+ self._dict_factory = dict_factory
+
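+        # Record how many hooks are registered at this point (the defaults), so copies can skip re-registering them.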
+ self._unstruct_copy_skip = self._unstructure_func.get_num_fns()
+ self._struct_copy_skip = self._structure_func.get_num_fns()
+
+ def unstructure(self, obj: Any, unstructure_as: Any = None) -> Any:
+ return self._unstructure_func.dispatch(
+ obj.__class__ if unstructure_as is None else unstructure_as
+ )(obj)
+
+ @property
+ def unstruct_strat(self) -> UnstructureStrategy:
+ """The default way of unstructuring ``attrs`` classes."""
+ return (
+ UnstructureStrategy.AS_DICT
+ if self._unstructure_attrs == self.unstructure_attrs_asdict
+ else UnstructureStrategy.AS_TUPLE
+ )
+
+ @overload
+ def register_unstructure_hook(self, cls: UnstructureHookT) -> UnstructureHookT: ...
+
+ @overload
+ def register_unstructure_hook(self, cls: Any, func: UnstructureHook) -> None: ...
+
+ def register_unstructure_hook(
+ self, cls: Any = None, func: UnstructureHook | None = None
+    ) -> Callable[[UnstructureHook], UnstructureHook] | None:
+ """Register a class-to-primitive converter function for a class.
+
+ The converter function should take an instance of the class and return
+ its Python equivalent.
+
+ May also be used as a decorator. When used as a decorator, the first
+ argument annotation from the decorated function will be used as the
+ type to register the hook for.
+
+ .. versionchanged:: 24.1.0
+ This method may now be used as a decorator.
+ """
+ if func is None:
+ # Autodetecting decorator.
+ func = cls
+ sig = signature(func)
+ cls = next(iter(sig.parameters.values())).annotation
+ self.register_unstructure_hook(cls, func)
+
+ return func
+
+ if attrs_has(cls):
+ resolve_types(cls)
+ if is_union_type(cls):
+ self._unstructure_func.register_func_list([(lambda t: t == cls, func)])
+ elif get_newtype_base(cls) is not None:
+ # This is a newtype, so we handle it specially.
+ self._unstructure_func.register_func_list([(lambda t: t is cls, func)])
+ else:
+ self._unstructure_func.register_cls_list([(cls, func)])
+ return None
+
+ def register_unstructure_hook_func(
+ self, check_func: Predicate, func: UnstructureHook
+ ) -> None:
+ """Register a class-to-primitive converter function for a class, using
+ a function to check if it's a match.
+ """
+ self._unstructure_func.register_func_list([(check_func, func)])
+
+ @overload
+ def register_unstructure_hook_factory(
+ self, predicate: Predicate
+ ) -> Callable[[AnyUnstructureHookFactoryBase], AnyUnstructureHookFactoryBase]: ...
+
+ @overload
+ def register_unstructure_hook_factory(
+ self, predicate: Predicate, factory: UnstructureHookFactory
+ ) -> UnstructureHookFactory: ...
+
+ @overload
+ def register_unstructure_hook_factory(
+ self,
+ predicate: Predicate,
+ factory: ExtendedUnstructureHookFactory[BaseConverter],
+ ) -> ExtendedUnstructureHookFactory[BaseConverter]: ...
+
+ def register_unstructure_hook_factory(self, predicate, factory=None):
+ """
+ Register a hook factory for a given predicate.
+
+ The hook factory may expose an additional required parameter. In this case,
+ the current converter will be provided to the hook factory as that
+ parameter.
+
+ May also be used as a decorator.
+
+ :param predicate: A function that, given a type, returns whether the factory
+ can produce a hook for that type.
+ :param factory: A callable that, given a type, produces an unstructuring
+ hook for that type. This unstructuring hook will be cached.
+
+ .. versionchanged:: 24.1.0
+ This method may now be used as a decorator.
+ The factory may also receive the converter as a second, required argument.
+ """
+ if factory is None:
+
+ def decorator(factory):
+ # Is this an extended factory (takes a converter too)?
+ if _is_extended_factory(factory):
+ self._unstructure_func.register_func_list(
+ [(predicate, factory, "extended")]
+ )
+ else:
+ self._unstructure_func.register_func_list(
+ [(predicate, factory, True)]
+ )
+
+ return decorator
+
+ self._unstructure_func.register_func_list(
+ [
+ (
+ predicate,
+ factory,
+ "extended" if _is_extended_factory(factory) else True,
+ )
+ ]
+ )
+ return factory
+
+ def get_unstructure_hook(
+ self, type: Any, cache_result: bool = True
+ ) -> UnstructureHook:
+ """Get the unstructure hook for the given type.
+
+ This hook can be manually called, or composed with other functions
+ and re-registered.
+
+ If no hook is registered, the converter unstructure fallback factory
+ will be used to produce one.
+
+        :param cache_result: Whether to cache the returned hook.
+
+ .. versionadded:: 24.1.0
+ """
+ return (
+ self._unstructure_func.dispatch(type)
+ if cache_result
+ else self._unstructure_func.dispatch_without_caching(type)
+ )
+
+ @overload
+ def register_structure_hook(self, cl: StructureHookT) -> StructureHookT: ...
+
+ @overload
+ def register_structure_hook(self, cl: Any, func: StructureHook) -> None: ...
+
+ def register_structure_hook(
+ self, cl: Any, func: StructureHook | None = None
+ ) -> None:
+ """Register a primitive-to-class converter function for a type.
+
+ The converter function should take two arguments:
+ * a Python object to be converted,
+ * the type to convert to
+
+ and return the instance of the class. The type may seem redundant, but
+ is sometimes needed (for example, when dealing with generic classes).
+
+ This method may be used as a decorator. In this case, the decorated
+ hook must have a return type annotation, and this annotation will be used
+ as the type for the hook.
+
+ .. versionchanged:: 24.1.0
+ This method may now be used as a decorator.
+ """
+ if func is None:
+ # The autodetecting decorator.
+ func = cl
+ sig = signature(func)
+ self.register_structure_hook(sig.return_annotation, func)
+ return func
+
+ if attrs_has(cl):
+ resolve_types(cl)
+ if is_union_type(cl):
+ self._union_struct_registry[cl] = func
+ self._structure_func.clear_cache()
+ elif get_newtype_base(cl) is not None:
+ # This is a newtype, so we handle it specially.
+ self._structure_func.register_func_list([(lambda t: t is cl, func)])
+ else:
+ self._structure_func.register_cls_list([(cl, func)])
+ return None
+
+ def register_structure_hook_func(
+ self, check_func: Predicate, func: StructureHook
+ ) -> None:
+ """Register a class-to-primitive converter function for a class, using
+ a function to check if it's a match.
+ """
+ self._structure_func.register_func_list([(check_func, func)])
+
+ @overload
+ def register_structure_hook_factory(
+ self, predicate: Predicate
+ ) -> Callable[[AnyStructureHookFactoryBase], AnyStructureHookFactoryBase]: ...
+
+ @overload
+ def register_structure_hook_factory(
+ self, predicate: Predicate, factory: StructureHookFactory
+ ) -> StructureHookFactory: ...
+
+ @overload
+ def register_structure_hook_factory(
+ self, predicate: Predicate, factory: ExtendedStructureHookFactory[BaseConverter]
+ ) -> ExtendedStructureHookFactory[BaseConverter]: ...
+
+ def register_structure_hook_factory(self, predicate, factory=None):
+ """
+ Register a hook factory for a given predicate.
+
+ The hook factory may expose an additional required parameter. In this case,
+ the current converter will be provided to the hook factory as that
+ parameter.
+
+ May also be used as a decorator.
+
+ :param predicate: A function that, given a type, returns whether the factory
+ can produce a hook for that type.
+ :param factory: A callable that, given a type, produces a structuring
+ hook for that type. This structuring hook will be cached.
+
+ .. versionchanged:: 24.1.0
+ This method may now be used as a decorator.
+ The factory may also receive the converter as a second, required argument.
+ """
+ if factory is None:
+ # Decorator use.
+ def decorator(factory):
+ # Is this an extended factory (takes a converter too)?
+ if _is_extended_factory(factory):
+ self._structure_func.register_func_list(
+ [(predicate, factory, "extended")]
+ )
+ else:
+ self._structure_func.register_func_list(
+ [(predicate, factory, True)]
+ )
+
+ return decorator
+ self._structure_func.register_func_list(
+ [
+ (
+ predicate,
+ factory,
+ "extended" if _is_extended_factory(factory) else True,
+ )
+ ]
+ )
+ return factory
+
+ def structure(self, obj: UnstructuredValue, cl: type[T]) -> T:
+ """Convert unstructured Python data structures to structured data."""
+ return self._structure_func.dispatch(cl)(obj, cl)
+
+ def get_structure_hook(self, type: Any, cache_result: bool = True) -> StructureHook:
+ """Get the structure hook for the given type.
+
+ This hook can be manually called, or composed with other functions
+ and re-registered.
+
+ If no hook is registered, the converter structure fallback factory
+ will be used to produce one.
+
+        :param cache_result: Whether to cache the returned hook.
+
+ .. versionadded:: 24.1.0
+ """
+ return (
+ self._structure_func.dispatch(type)
+ if cache_result
+ else self._structure_func.dispatch_without_caching(type)
+ )
+
+ # Classes to Python primitives.
+ def unstructure_attrs_asdict(self, obj: Any) -> dict[str, Any]:
+ """Our version of `attrs.asdict`, so we can call back to us."""
+ attrs = fields(obj.__class__)
+ dispatch = self._unstructure_func.dispatch
+ rv = self._dict_factory()
+ for a in attrs:
+ name = a.name
+ v = getattr(obj, name)
+ rv[name] = dispatch(a.type or v.__class__)(v)
+ return rv
+
+ def unstructure_attrs_astuple(self, obj: Any) -> tuple[Any, ...]:
+ """Our version of `attrs.astuple`, so we can call back to us."""
+ attrs = fields(obj.__class__)
+ dispatch = self._unstructure_func.dispatch
+ res = []
+ for a in attrs:
+ name = a.name
+ v = getattr(obj, name)
+ res.append(dispatch(a.type or v.__class__)(v))
+ return tuple(res)
+
+ def _unstructure_enum(self, obj: Enum) -> Any:
+ """Convert an enum to its value."""
+ return obj.value
+
+ def _unstructure_seq(self, seq: Sequence[T]) -> Sequence[T]:
+ """Convert a sequence to primitive equivalents."""
+ # We can reuse the sequence class, so tuples stay tuples.
+ dispatch = self._unstructure_func.dispatch
+ return seq.__class__(dispatch(e.__class__)(e) for e in seq)
+
+ def _unstructure_mapping(self, mapping: Mapping[T, V]) -> Mapping[T, V]:
+ """Convert a mapping of attr classes to primitive equivalents."""
+
+ # We can reuse the mapping class, so dicts stay dicts and OrderedDicts
+ # stay OrderedDicts.
+ dispatch = self._unstructure_func.dispatch
+ return mapping.__class__(
+ (dispatch(k.__class__)(k), dispatch(v.__class__)(v))
+ for k, v in mapping.items()
+ )
+
+    # Note: use UnionType once 3.11 is the baseline, as the
+    # behaviour of @final changed there. This would affect how
+    # we can support UnionType in ._compat.py.
+ def _unstructure_union(self, obj: Any) -> Any:
+ """
+ Unstructure an object as a union.
+
+ By default, just unstructures the instance.
+ """
+ return self._unstructure_func.dispatch(obj.__class__)(obj)
+
+ # Python primitives to classes.
+
+ def _gen_structure_generic(self, cl: type[T]) -> DictStructureFn[T]:
+ """Create and return a hook for structuring generics."""
+ return make_dict_structure_fn(
+ cl, self, _cattrs_prefer_attrib_converters=self._prefer_attrib_converters
+ )
+
+ def _gen_attrs_union_structure(
+ self, cl: Any, use_literals: bool = True
+ ) -> Callable[[Any, type[T]], type[T] | None]:
+ """
+ Generate a structuring function for a union of attrs classes (and maybe None).
+
+ :param use_literals: Whether to consider literal fields.
+ """
+ dis_fn = self._get_dis_func(cl, use_literals=use_literals)
+ has_none = NoneType in cl.__args__
+
+ if has_none:
+
+ def structure_attrs_union(obj, _) -> cl:
+ if obj is None:
+ return None
+ return self.structure(obj, dis_fn(obj))
+
+ else:
+
+ def structure_attrs_union(obj, _):
+ return self.structure(obj, dis_fn(obj))
+
+ return structure_attrs_union
+
+ @staticmethod
+ def _structure_call(obj: Any, cl: type[T]) -> Any:
+ """Just call ``cl`` with the given ``obj``.
+
+ This is just an optimization on the ``_structure_default`` case, when
+ we know we can skip the ``if`` s. Use for ``str``, ``bytes``, ``enum``,
+ etc.
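+
+        A quick, hedged illustration (``int`` is only an example target)::
+
+            >>> BaseConverter._structure_call("42", int)
+            42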
+ """
+ return cl(obj)
+
+ @staticmethod
+ def _structure_simple_literal(val, type):
+ if val not in type.__args__:
+ raise Exception(f"{val} not in literal {type}")
+ return val
+
+ @staticmethod
+ def _structure_enum_literal(val, type):
+ vals = {(x.value if isinstance(x, Enum) else x): x for x in type.__args__}
+ try:
+ return vals[val]
+ except KeyError:
+ raise Exception(f"{val} not in literal {type}") from None
+
+ def _structure_newtype(self, val: UnstructuredValue, type) -> StructuredValue:
+ base = get_newtype_base(type)
+ return self.get_structure_hook(base)(val, base)
+
+ def _find_type_alias_structure_hook(self, type: Any) -> StructureHook:
+ base = get_type_alias_base(type)
+ res = self.get_structure_hook(base)
+ if res == self._structure_call:
+ # we need to replace the type arg of `structure_call`
+ return lambda v, _, __base=base: __base(v)
+ return lambda v, _, __base=base: res(v, __base)
+
+ def _structure_final_factory(self, type):
+ base = get_final_base(type)
+ res = self.get_structure_hook(base)
+ return lambda v, _, __base=base: res(v, __base)
+
+ # Attrs classes.
+
+ def structure_attrs_fromtuple(self, obj: tuple[Any, ...], cl: type[T]) -> T:
+ """Load an attrs class from a sequence (tuple)."""
+ conv_obj = [] # A list of converter parameters.
+ for a, value in zip(fields(cl), obj):
+ # We detect the type by the metadata.
+ converted = self._structure_attribute(a, value)
+ conv_obj.append(converted)
+
+ return cl(*conv_obj)
+
+ def _structure_attribute(self, a: Attribute | Field, value: Any) -> Any:
+ """Handle an individual attrs attribute."""
+ type_ = a.type
+ attrib_converter = getattr(a, "converter", None)
+ if self._prefer_attrib_converters and attrib_converter:
+            # An attrib converter is defined on this attribute, and
+            # prefer_attrib_converters is set to give these priority over
+            # registered structure hooks. So, pass through the raw value,
+            # which attrs will then feed into the converter.
+ return value
+ if type_ is None:
+ # No type metadata.
+ return value
+
+ try:
+ return self._structure_func.dispatch(type_)(value, type_)
+ except StructureHandlerNotFoundError:
+ if attrib_converter:
+ # Return the original value and fallback to using an attrib converter.
+ return value
+ raise
+
+ def structure_attrs_fromdict(self, obj: Mapping[str, Any], cl: type[T]) -> T:
+ """Instantiate an attrs class from a mapping (dict)."""
+ # For public use.
+
+ conv_obj = {} # Start with a fresh dict, to ignore extra keys.
+ for a in fields(cl):
+ try:
+ val = obj[a.name]
+ except KeyError:
+ continue
+
+ # try .alias and .name because this code also supports dataclasses!
+ conv_obj[getattr(a, "alias", a.name)] = self._structure_attribute(a, val)
+
+ return cl(**conv_obj)
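+
+    # Hedged illustration (`Point` is a hypothetical attrs class; extra keys
+    # in the input mapping are ignored, as the loop above shows):
+    #
+    #     BaseConverter().structure_attrs_fromdict(
+    #         {"x": 1, "y": 2, "junk": 3}, Point
+    #     )
+    #     # -> Point(x=1, y=2)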
+
+ def _structure_deque(self, obj: Iterable[T], cl: Any) -> deque[T]:
+ """Convert an iterable to a potentially generic deque."""
+ if is_bare(cl) or cl.__args__[0] in ANIES:
+ res = deque(obj)
+ else:
+ elem_type = cl.__args__[0]
+ handler = self._structure_func.dispatch(elem_type)
+ if self.detailed_validation:
+ errors = []
+ res = deque()
+ ix = 0 # Avoid `enumerate` for performance.
+ for e in obj:
+ try:
+ res.append(handler(e, elem_type))
+ except Exception as e:
+ msg = IterableValidationNote(
+ f"Structuring {cl} @ index {ix}", ix, elem_type
+ )
+ e.__notes__ = [*getattr(e, "__notes__", []), msg]
+ errors.append(e)
+ finally:
+ ix += 1
+ if errors:
+ raise IterableValidationError(
+ f"While structuring {cl!r}", errors, cl
+ )
+ else:
+ res = deque(handler(e, elem_type) for e in obj)
+ return res
+
+ def _structure_set(
+ self, obj: Iterable[T], cl: Any, structure_to: type = set
+ ) -> Set[T]:
+ """Convert an iterable into a potentially generic set."""
+ if is_bare(cl) or cl.__args__[0] in ANIES:
+ return structure_to(obj)
+ elem_type = cl.__args__[0]
+ handler = self._structure_func.dispatch(elem_type)
+ if self.detailed_validation:
+ errors = []
+ res = set()
+ ix = 0
+ for e in obj:
+ try:
+ res.add(handler(e, elem_type))
+ except Exception as exc:
+ msg = IterableValidationNote(
+ f"Structuring {structure_to.__name__} @ element {e!r}",
+ ix,
+ elem_type,
+ )
+ exc.__notes__ = [*getattr(exc, "__notes__", []), msg]
+ errors.append(exc)
+ finally:
+ ix += 1
+ if errors:
+ raise IterableValidationError(f"While structuring {cl!r}", errors, cl)
+ return res if structure_to is set else structure_to(res)
+ if structure_to is set:
+ return {handler(e, elem_type) for e in obj}
+ return structure_to([handler(e, elem_type) for e in obj])
+
+ def _structure_frozenset(
+ self, obj: Iterable[T], cl: Any
+ ) -> FrozenSetSubscriptable[T]:
+ """Convert an iterable into a potentially generic frozenset."""
+ return self._structure_set(obj, cl, structure_to=frozenset)
+
+ def _structure_dict(self, obj: Mapping[T, V], cl: Any) -> dict[T, V]:
+ """Convert a mapping into a potentially generic dict."""
+ if is_bare(cl) or cl.__args__ == (Any, Any):
+ return dict(obj)
+ key_type, val_type = cl.__args__
+
+ if self.detailed_validation:
+ key_handler = self._structure_func.dispatch(key_type)
+ val_handler = self._structure_func.dispatch(val_type)
+ errors = []
+ res = {}
+
+ for k, v in obj.items():
+ try:
+ value = val_handler(v, val_type)
+ except Exception as exc:
+ msg = IterableValidationNote(
+ f"Structuring mapping value @ key {k!r}", k, val_type
+ )
+ exc.__notes__ = [*getattr(exc, "__notes__", []), msg]
+ errors.append(exc)
+ continue
+
+ try:
+ key = key_handler(k, key_type)
+ res[key] = value
+ except Exception as exc:
+ msg = IterableValidationNote(
+ f"Structuring mapping key @ key {k!r}", k, key_type
+ )
+ exc.__notes__ = [*getattr(exc, "__notes__", []), msg]
+ errors.append(exc)
+
+ if errors:
+ raise IterableValidationError(f"While structuring {cl!r}", errors, cl)
+ return res
+
+ if key_type in ANIES:
+ val_conv = self._structure_func.dispatch(val_type)
+ return {k: val_conv(v, val_type) for k, v in obj.items()}
+ if val_type in ANIES:
+ key_conv = self._structure_func.dispatch(key_type)
+ return {key_conv(k, key_type): v for k, v in obj.items()}
+ key_conv = self._structure_func.dispatch(key_type)
+ val_conv = self._structure_func.dispatch(val_type)
+ return {key_conv(k, key_type): val_conv(v, val_type) for k, v in obj.items()}
+
+ def _structure_optional(self, obj, union):
+ if obj is None:
+ return None
+ union_params = union.__args__
+ other = union_params[0] if union_params[1] is NoneType else union_params[1]
+ # We can't actually have a Union of a Union, so this is safe.
+ return self._structure_func.dispatch(other)(obj, other)
+
+ def _structure_tuple(self, obj: Any, tup: type[T]) -> T:
+ """Deal with structuring into a tuple."""
+ tup_params = None if tup in (Tuple, tuple) else tup.__args__
+ has_ellipsis = tup_params and tup_params[-1] is Ellipsis
+ if tup_params is None or (has_ellipsis and tup_params[0] in ANIES):
+ # Just a Tuple. (No generic information.)
+ return tuple(obj)
+ if has_ellipsis:
+            # We're dealing with a homogeneous tuple, e.g. Tuple[int, ...]
+ tup_type = tup_params[0]
+ conv = self._structure_func.dispatch(tup_type)
+ if self.detailed_validation:
+ errors = []
+ res = []
+ ix = 0
+ for e in obj:
+ try:
+ res.append(conv(e, tup_type))
+ except Exception as exc:
+ msg = IterableValidationNote(
+ f"Structuring {tup} @ index {ix}", ix, tup_type
+ )
+ exc.__notes__ = [*getattr(exc, "__notes__", []), msg]
+ errors.append(exc)
+ finally:
+ ix += 1
+ if errors:
+ raise IterableValidationError(
+ f"While structuring {tup!r}", errors, tup
+ )
+ return tuple(res)
+ return tuple(conv(e, tup_type) for e in obj)
+
+        # We're dealing with a heterogeneous tuple.
+ exp_len = len(tup_params)
+ try:
+ len_obj = len(obj)
+ except TypeError:
+            pass  # Most likely an unsized iterator, e.g. a generator.
+ else:
+ if len_obj > exp_len:
+ exp_len = len_obj
+ if self.detailed_validation:
+ errors = []
+ res = []
+ for ix, (t, e) in enumerate(zip(tup_params, obj)):
+ try:
+ conv = self._structure_func.dispatch(t)
+ res.append(conv(e, t))
+ except Exception as exc:
+ msg = IterableValidationNote(
+ f"Structuring {tup} @ index {ix}", ix, t
+ )
+ exc.__notes__ = [*getattr(exc, "__notes__", []), msg]
+ errors.append(exc)
+ if len(res) < exp_len:
+ problem = "Not enough" if len(res) < len(tup_params) else "Too many"
+ exc = ValueError(f"{problem} values in {obj!r} to structure as {tup!r}")
+ msg = f"Structuring {tup}"
+ exc.__notes__ = [*getattr(exc, "__notes__", []), msg]
+ errors.append(exc)
+ if errors:
+ raise IterableValidationError(f"While structuring {tup!r}", errors, tup)
+ return tuple(res)
+
+ res = tuple(
+ [self._structure_func.dispatch(t)(e, t) for t, e in zip(tup_params, obj)]
+ )
+ if len(res) < exp_len:
+ problem = "Not enough" if len(res) < len(tup_params) else "Too many"
+ raise ValueError(f"{problem} values in {obj!r} to structure as {tup!r}")
+ return res
+
+ def _get_dis_func(
+ self,
+ union: Any,
+ use_literals: bool = True,
+ overrides: dict[str, AttributeOverride] | None = None,
+ ) -> Callable[[Any], type]:
+ """Fetch or try creating a disambiguation function for a union."""
+ union_types = union.__args__
+ if NoneType in union_types:
+ # We support unions of attrs classes and NoneType higher in the
+ # logic.
+ union_types = tuple(e for e in union_types if e is not NoneType)
+
+ # TODO: technically both disambiguators could support TypedDicts and
+ # dataclasses...
+ if not all(has(get_origin(e) or e) for e in union_types):
+ raise StructureHandlerNotFoundError(
+                "Only unions of attrs classes are currently supported. "
+                "Register a structure hook manually.",
+ type_=union,
+ )
+
+ return create_default_dis_func(
+ self,
+ *union_types,
+ use_literals=use_literals,
+ overrides=overrides if overrides is not None else "from_converter",
+ )
+
+ def __deepcopy__(self, _) -> BaseConverter:
+ return self.copy()
+
+ def copy(
+ self,
+ dict_factory: Callable[[], Any] | None = None,
+ unstruct_strat: UnstructureStrategy | None = None,
+ prefer_attrib_converters: bool | None = None,
+ detailed_validation: bool | None = None,
+ ) -> BaseConverter:
+ """Create a copy of the converter, keeping all existing custom hooks.
+
+ :param detailed_validation: Whether to use a slightly slower mode for detailed
+ validation errors.
+ """
+ res = self.__class__(
+ dict_factory if dict_factory is not None else self._dict_factory,
+ (
+ unstruct_strat
+ if unstruct_strat is not None
+ else (
+ UnstructureStrategy.AS_DICT
+ if self._unstructure_attrs == self.unstructure_attrs_asdict
+ else UnstructureStrategy.AS_TUPLE
+ )
+ ),
+ (
+ prefer_attrib_converters
+ if prefer_attrib_converters is not None
+ else self._prefer_attrib_converters
+ ),
+ (
+ detailed_validation
+ if detailed_validation is not None
+ else self.detailed_validation
+ ),
+ )
+
+ self._unstructure_func.copy_to(res._unstructure_func, self._unstruct_copy_skip)
+ self._structure_func.copy_to(res._structure_func, self._struct_copy_skip)
+
+ return res
+
+
+class Converter(BaseConverter):
+ """A converter which generates specialized un/structuring functions."""
+
+ __slots__ = (
+ "omit_if_default",
+ "forbid_extra_keys",
+ "type_overrides",
+ "_unstruct_collection_overrides",
+ )
+
+ def __init__(
+ self,
+ dict_factory: Callable[[], Any] = dict,
+ unstruct_strat: UnstructureStrategy = UnstructureStrategy.AS_DICT,
+ omit_if_default: bool = False,
+ forbid_extra_keys: bool = False,
+ type_overrides: Mapping[type, AttributeOverride] = {},
+ unstruct_collection_overrides: Mapping[type, Callable] = {},
+ prefer_attrib_converters: bool = False,
+ detailed_validation: bool = True,
+ unstructure_fallback_factory: HookFactory[UnstructureHook] = lambda _: identity,
+ structure_fallback_factory: HookFactory[StructureHook] = lambda _: raise_error,
+ ):
+ """
+ :param detailed_validation: Whether to use a slightly slower mode for detailed
+ validation errors.
+ :param unstructure_fallback_factory: A hook factory to be called when no
+ registered unstructuring hooks match.
+ :param structure_fallback_factory: A hook factory to be called when no
+ registered structuring hooks match.
+
+ .. versionadded:: 23.2.0 *unstructure_fallback_factory*
+ .. versionadded:: 23.2.0 *structure_fallback_factory*
+ """
+ super().__init__(
+ dict_factory=dict_factory,
+ unstruct_strat=unstruct_strat,
+ prefer_attrib_converters=prefer_attrib_converters,
+ detailed_validation=detailed_validation,
+ unstructure_fallback_factory=unstructure_fallback_factory,
+ structure_fallback_factory=structure_fallback_factory,
+ )
+ self.omit_if_default = omit_if_default
+ self.forbid_extra_keys = forbid_extra_keys
+ self.type_overrides = dict(type_overrides)
+
+ unstruct_collection_overrides = {
+ get_origin(k) or k: v for k, v in unstruct_collection_overrides.items()
+ }
+
+ self._unstruct_collection_overrides = unstruct_collection_overrides
+
+ # Do a little post-processing magic to make things easier for users.
+ co = unstruct_collection_overrides
+
+ # abc.Set overrides, if defined, apply to abc.MutableSets and sets
+ if OriginAbstractSet in co:
+ if OriginMutableSet not in co:
+ co[OriginMutableSet] = co[OriginAbstractSet]
+ co[AbcMutableSet] = co[OriginAbstractSet] # For 3.8 compatibility.
+ if FrozenSetSubscriptable not in co:
+ co[FrozenSetSubscriptable] = co[OriginAbstractSet]
+
+        # abc.MutableSet overrides, if defined, apply to sets
+ if OriginMutableSet in co and set not in co:
+ co[set] = co[OriginMutableSet]
+
+ if FrozenSetSubscriptable in co:
+ co[frozenset] = co[FrozenSetSubscriptable] # For 3.8 compatibility.
+
+ # abc.Sequence overrides, if defined, can apply to MutableSequences, lists and
+ # tuples
+ if Sequence in co:
+ if MutableSequence not in co:
+ co[MutableSequence] = co[Sequence]
+ if tuple not in co:
+ co[tuple] = co[Sequence]
+
+ # abc.MutableSequence overrides, if defined, can apply to lists
+ if MutableSequence in co:
+ if list not in co:
+ co[list] = co[MutableSequence]
+ if deque not in co:
+ co[deque] = co[MutableSequence]
+
+ # abc.Mapping overrides, if defined, can apply to MutableMappings
+ if Mapping in co and MutableMapping not in co:
+ co[MutableMapping] = co[Mapping]
+
+ # abc.MutableMapping overrides, if defined, can apply to dicts
+ if MutableMapping in co and dict not in co:
+ co[dict] = co[MutableMapping]
+
+ # builtins.dict overrides, if defined, can apply to counters
+ if dict in co and Counter not in co:
+ co[Counter] = co[dict]
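+
+        # A hedged illustration of the cascade above: an override keyed on
+        # abc.Sequence, e.g.
+        #
+        #     conv = Converter(unstruct_collection_overrides={Sequence: tuple})
+        #     conv.unstructure([1, 2, 3])  # -> (1, 2, 3)
+        #
+        # also applies to MutableSequences (and thus lists), per these rules.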
+
+ if unstruct_strat is UnstructureStrategy.AS_DICT:
+ # Override the attrs handler.
+ self.register_unstructure_hook_factory(
+ has_with_generic, self.gen_unstructure_attrs_fromdict
+ )
+ self.register_structure_hook_factory(
+ has_with_generic, self.gen_structure_attrs_fromdict
+ )
+ self.register_unstructure_hook_factory(
+ is_annotated, self.gen_unstructure_annotated
+ )
+ self.register_unstructure_hook_factory(
+ is_hetero_tuple, self.gen_unstructure_hetero_tuple
+ )
+ self.register_unstructure_hook_factory(is_namedtuple)(
+ namedtuple_unstructure_factory
+ )
+ self.register_unstructure_hook_factory(
+ is_sequence, self.gen_unstructure_iterable
+ )
+ self.register_unstructure_hook_factory(is_mapping, self.gen_unstructure_mapping)
+ self.register_unstructure_hook_factory(
+ is_mutable_set,
+ lambda cl: self.gen_unstructure_iterable(cl, unstructure_to=set),
+ )
+ self.register_unstructure_hook_factory(
+ is_frozenset,
+ lambda cl: self.gen_unstructure_iterable(cl, unstructure_to=frozenset),
+ )
+ self.register_unstructure_hook_factory(
+ is_optional, self.gen_unstructure_optional
+ )
+ self.register_unstructure_hook_factory(
+ is_typeddict, self.gen_unstructure_typeddict
+ )
+ self.register_unstructure_hook_factory(
+ lambda t: get_newtype_base(t) is not None,
+ lambda t: self.get_unstructure_hook(get_newtype_base(t)),
+ )
+
+ self.register_structure_hook_factory(is_annotated, self.gen_structure_annotated)
+ self.register_structure_hook_factory(is_mapping, self.gen_structure_mapping)
+ self.register_structure_hook_factory(is_counter, self.gen_structure_counter)
+ self.register_structure_hook_factory(is_typeddict, self.gen_structure_typeddict)
+ self.register_structure_hook_factory(
+ lambda t: get_newtype_base(t) is not None, self.get_structure_newtype
+ )
+
+ # We keep these so we can more correctly copy the hooks.
+ self._struct_copy_skip = self._structure_func.get_num_fns()
+ self._unstruct_copy_skip = self._unstructure_func.get_num_fns()
+
+ @overload
+ def register_unstructure_hook_factory(
+ self, predicate: Predicate
+ ) -> Callable[[AnyUnstructureHookFactory], AnyUnstructureHookFactory]: ...
+
+ @overload
+ def register_unstructure_hook_factory(
+ self, predicate: Predicate, factory: UnstructureHookFactory
+ ) -> UnstructureHookFactory: ...
+
+ @overload
+ def register_unstructure_hook_factory(
+ self, predicate: Predicate, factory: ExtendedUnstructureHookFactory[Converter]
+ ) -> ExtendedUnstructureHookFactory[Converter]: ...
+
+ def register_unstructure_hook_factory(self, predicate, factory=None):
+ # This dummy wrapper is required due to how `@overload` works.
+ return super().register_unstructure_hook_factory(predicate, factory)
+
+ @overload
+ def register_structure_hook_factory(
+ self, predicate: Predicate
+ ) -> Callable[[AnyStructureHookFactory], AnyStructureHookFactory]: ...
+
+ @overload
+ def register_structure_hook_factory(
+ self, predicate: Predicate, factory: StructureHookFactory
+ ) -> StructureHookFactory: ...
+
+ @overload
+ def register_structure_hook_factory(
+ self, predicate: Predicate, factory: ExtendedStructureHookFactory[Converter]
+ ) -> ExtendedStructureHookFactory[Converter]: ...
+
+ def register_structure_hook_factory(self, predicate, factory=None):
+ # This dummy wrapper is required due to how `@overload` works.
+ return super().register_structure_hook_factory(predicate, factory)
+
+ def get_structure_newtype(self, type: type[T]) -> Callable[[Any, Any], T]:
+ base = get_newtype_base(type)
+ handler = self.get_structure_hook(base)
+ return lambda v, _: handler(v, base)
+
+ def gen_unstructure_annotated(self, type):
+ origin = type.__origin__
+ return self.get_unstructure_hook(origin)
+
+ def gen_structure_annotated(self, type) -> Callable:
+ """A hook factory for annotated types."""
+ origin = type.__origin__
+ hook = self.get_structure_hook(origin)
+ return lambda v, _: hook(v, origin)
+
+ def gen_unstructure_typeddict(self, cl: Any) -> Callable[[dict], dict]:
+ """Generate a TypedDict unstructure function.
+
+        Also apply converter-scoped modifications.
+ """
+ return make_typeddict_dict_unstruct_fn(cl, self)
+
+ def gen_unstructure_attrs_fromdict(
+ self, cl: type[T]
+ ) -> Callable[[T], dict[str, Any]]:
+ origin = get_origin(cl)
+ attribs = fields(origin or cl)
+ if attrs_has(cl) and any(isinstance(a.type, str) for a in attribs):
+ # PEP 563 annotations - need to be resolved.
+ resolve_types(cl)
+ attrib_overrides = {
+ a.name: self.type_overrides[a.type]
+ for a in attribs
+ if a.type in self.type_overrides
+ }
+
+ return make_dict_unstructure_fn(
+ cl, self, _cattrs_omit_if_default=self.omit_if_default, **attrib_overrides
+ )
+
+ def gen_unstructure_optional(self, cl: type[T]) -> Callable[[T], Any]:
+ """Generate an unstructuring hook for optional types."""
+ union_params = cl.__args__
+ other = union_params[0] if union_params[1] is NoneType else union_params[1]
+
+ if isinstance(other, TypeVar):
+ handler = self.unstructure
+ else:
+ handler = self.get_unstructure_hook(other)
+
+ def unstructure_optional(val, _handler=handler):
+ return None if val is None else _handler(val)
+
+ return unstructure_optional
+
+ def gen_structure_typeddict(self, cl: Any) -> Callable[[dict, Any], dict]:
+ """Generate a TypedDict structure function.
+
+        Also apply converter-scoped modifications.
+ """
+ return make_typeddict_dict_struct_fn(
+ cl, self, _cattrs_detailed_validation=self.detailed_validation
+ )
+
+ def gen_structure_attrs_fromdict(
+ self, cl: type[T]
+ ) -> Callable[[Mapping[str, Any], Any], T]:
+ attribs = fields(get_origin(cl) or cl if is_generic(cl) else cl)
+ if attrs_has(cl) and any(isinstance(a.type, str) for a in attribs):
+ # PEP 563 annotations - need to be resolved.
+ resolve_types(cl)
+ attrib_overrides = {
+ a.name: self.type_overrides[a.type]
+ for a in attribs
+ if a.type in self.type_overrides
+ }
+ return make_dict_structure_fn(
+ cl,
+ self,
+ _cattrs_forbid_extra_keys=self.forbid_extra_keys,
+ _cattrs_prefer_attrib_converters=self._prefer_attrib_converters,
+ _cattrs_detailed_validation=self.detailed_validation,
+ **attrib_overrides,
+ )
+
+ def gen_unstructure_iterable(
+ self, cl: Any, unstructure_to: Any = None
+ ) -> IterableUnstructureFn:
+ unstructure_to = self._unstruct_collection_overrides.get(
+ get_origin(cl) or cl, unstructure_to or list
+ )
+ h = iterable_unstructure_factory(cl, self, unstructure_to=unstructure_to)
+ self._unstructure_func.register_cls_list([(cl, h)], direct=True)
+ return h
+
+ def gen_unstructure_hetero_tuple(
+ self, cl: Any, unstructure_to: Any = None
+ ) -> HeteroTupleUnstructureFn:
+ unstructure_to = self._unstruct_collection_overrides.get(
+ get_origin(cl) or cl, unstructure_to or tuple
+ )
+ h = make_hetero_tuple_unstructure_fn(cl, self, unstructure_to=unstructure_to)
+ self._unstructure_func.register_cls_list([(cl, h)], direct=True)
+ return h
+
+ def gen_unstructure_mapping(
+ self,
+ cl: Any,
+ unstructure_to: Any = None,
+ key_handler: Callable[[Any, Any | None], Any] | None = None,
+ ) -> MappingUnstructureFn:
+ unstructure_to = self._unstruct_collection_overrides.get(
+ get_origin(cl) or cl, unstructure_to or dict
+ )
+ h = make_mapping_unstructure_fn(
+ cl, self, unstructure_to=unstructure_to, key_handler=key_handler
+ )
+ self._unstructure_func.register_cls_list([(cl, h)], direct=True)
+ return h
+
+ def gen_structure_counter(self, cl: Any) -> MappingStructureFn[T]:
+ h = make_mapping_structure_fn(
+ cl,
+ self,
+ structure_to=Counter,
+ val_type=int,
+ detailed_validation=self.detailed_validation,
+ )
+ self._structure_func.register_cls_list([(cl, h)], direct=True)
+ return h
+
+ def gen_structure_mapping(self, cl: Any) -> MappingStructureFn[T]:
+ structure_to = get_origin(cl) or cl
+ if structure_to in (
+ MutableMapping,
+ AbcMutableMapping,
+ Mapping,
+ AbcMapping,
+ ): # These default to dicts
+ structure_to = dict
+ h = make_mapping_structure_fn(
+ cl, self, structure_to, detailed_validation=self.detailed_validation
+ )
+ self._structure_func.register_cls_list([(cl, h)], direct=True)
+ return h
+
+ def copy(
+ self,
+ dict_factory: Callable[[], Any] | None = None,
+ unstruct_strat: UnstructureStrategy | None = None,
+ omit_if_default: bool | None = None,
+ forbid_extra_keys: bool | None = None,
+ type_overrides: Mapping[type, AttributeOverride] | None = None,
+ unstruct_collection_overrides: Mapping[type, Callable] | None = None,
+ prefer_attrib_converters: bool | None = None,
+ detailed_validation: bool | None = None,
+ ) -> Converter:
+ """Create a copy of the converter, keeping all existing custom hooks.
+
+ :param detailed_validation: Whether to use a slightly slower mode for detailed
+ validation errors.
+ """
+ res = self.__class__(
+ dict_factory if dict_factory is not None else self._dict_factory,
+ (
+ unstruct_strat
+ if unstruct_strat is not None
+ else (
+ UnstructureStrategy.AS_DICT
+ if self._unstructure_attrs == self.unstructure_attrs_asdict
+ else UnstructureStrategy.AS_TUPLE
+ )
+ ),
+ omit_if_default if omit_if_default is not None else self.omit_if_default,
+ (
+ forbid_extra_keys
+ if forbid_extra_keys is not None
+ else self.forbid_extra_keys
+ ),
+ type_overrides if type_overrides is not None else self.type_overrides,
+ (
+ unstruct_collection_overrides
+ if unstruct_collection_overrides is not None
+ else self._unstruct_collection_overrides
+ ),
+ (
+ prefer_attrib_converters
+ if prefer_attrib_converters is not None
+ else self._prefer_attrib_converters
+ ),
+ (
+ detailed_validation
+ if detailed_validation is not None
+ else self.detailed_validation
+ ),
+ )
+
+ self._unstructure_func.copy_to(
+ res._unstructure_func, skip=self._unstruct_copy_skip
+ )
+ self._structure_func.copy_to(res._structure_func, skip=self._struct_copy_skip)
+
+ return res
+
+
+GenConverter: TypeAlias = Converter
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/disambiguators.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/disambiguators.py
new file mode 100644
index 0000000..ad36ae3
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/disambiguators.py
@@ -0,0 +1,205 @@
+"""Utilities for union (sum type) disambiguation."""
+
+from __future__ import annotations
+
+from collections import defaultdict
+from dataclasses import MISSING
+from functools import reduce
+from operator import or_
+from typing import TYPE_CHECKING, Any, Callable, Literal, Mapping, Union
+
+from attrs import NOTHING, Attribute, AttrsInstance
+
+from ._compat import (
+ NoneType,
+ adapted_fields,
+ fields_dict,
+ get_args,
+ get_origin,
+ has,
+ is_literal,
+ is_union_type,
+)
+from .gen import AttributeOverride
+
+if TYPE_CHECKING:
+ from .converters import BaseConverter
+
+__all__ = ["is_supported_union", "create_default_dis_func"]
+
+
+def is_supported_union(typ: Any) -> bool:
+ """Whether the type is a union of attrs classes."""
+ return is_union_type(typ) and all(
+ e is NoneType or has(get_origin(e) or e) for e in typ.__args__
+ )
+
+
+def create_default_dis_func(
+ converter: BaseConverter,
+ *classes: type[AttrsInstance],
+ use_literals: bool = True,
+ overrides: (
+ dict[str, AttributeOverride] | Literal["from_converter"]
+ ) = "from_converter",
+) -> Callable[[Mapping[Any, Any]], type[Any] | None]:
+ """Given attrs classes or dataclasses, generate a disambiguation function.
+
+ The function is based on unique fields without defaults or unique values.
+
+ :param use_literals: Whether to try using fields annotated as literals for
+ disambiguation.
+ :param overrides: Attribute overrides to apply.
+
+ .. versionchanged:: 24.1.0
+ Dataclasses are now supported.
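+
+    A hedged sketch (``A`` and ``B`` are hypothetical attrs classes sharing a
+    ``Literal``-typed ``kind`` field; ``converter`` is an existing converter)::
+
+        @define
+        class A:
+            kind: Literal["a"]
+
+        @define
+        class B:
+            kind: Literal["b"]
+
+        dis = create_default_dis_func(converter, A, B)
+        dis({"kind": "a"})  # -> A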
+ """
+ if len(classes) < 2:
+ raise ValueError("At least two classes required.")
+
+ if overrides == "from_converter":
+ overrides = [
+ getattr(converter.get_structure_hook(c), "overrides", {}) for c in classes
+ ]
+ else:
+ overrides = [overrides for _ in classes]
+
+ # first, attempt for unique values
+ if use_literals:
+ # requirements for a discriminator field:
+ # (... TODO: a single fallback is OK)
+ # - it must always be enumerated
+ cls_candidates = [
+ {
+ at.name
+ for at in adapted_fields(get_origin(cl) or cl)
+ if is_literal(at.type)
+ }
+ for cl in classes
+ ]
+
+ # literal field names common to all members
+ discriminators: set[str] = cls_candidates[0]
+ for possible_discriminators in cls_candidates:
+ discriminators &= possible_discriminators
+
+ best_result = None
+ best_discriminator = None
+ for discriminator in discriminators:
+ # maps Literal values (strings, ints...) to classes
+ mapping = defaultdict(list)
+
+ for cl in classes:
+ for key in get_args(
+ fields_dict(get_origin(cl) or cl)[discriminator].type
+ ):
+ mapping[key].append(cl)
+
+ if best_result is None or max(len(v) for v in mapping.values()) <= max(
+ len(v) for v in best_result.values()
+ ):
+ best_result = mapping
+ best_discriminator = discriminator
+
+ if (
+ best_result
+ and best_discriminator
+ and max(len(v) for v in best_result.values()) != len(classes)
+ ):
+ final_mapping = {
+ k: v[0] if len(v) == 1 else Union[tuple(v)]
+ for k, v in best_result.items()
+ }
+
+ def dis_func(data: Mapping[Any, Any]) -> type | None:
+ if not isinstance(data, Mapping):
+ raise ValueError("Only input mappings are supported.")
+ return final_mapping[data[best_discriminator]]
+
+ return dis_func
+
+ # next, attempt for unique keys
+
+ # NOTE: This could just as well work with just field availability and not
+ # uniqueness, returning Unions ... it doesn't do that right now.
+ cls_and_attrs = [
+ (cl, *_usable_attribute_names(cl, override))
+ for cl, override in zip(classes, overrides)
+ ]
+    # For each class, attempt to find a single unique required field.
+ uniq_attrs_dict: dict[str, type] = {}
+
+ # We start from classes with the largest number of unique fields
+ # so we can do easy picks first, making later picks easier.
+ cls_and_attrs.sort(key=lambda c_a: len(c_a[1]), reverse=True)
+
+ fallback = None # If none match, try this.
+
+ for cl, cl_reqs, back_map in cls_and_attrs:
+        # We do not have to consider classes we've already processed, since
+        # they will have been eliminated by the match dictionary.
+ other_classes = [
+ c_and_a
+ for c_and_a in cls_and_attrs
+ if c_and_a[0] is not cl and c_and_a[0] not in uniq_attrs_dict.values()
+ ]
+ other_reqs = reduce(or_, (c_a[1] for c_a in other_classes), set())
+ uniq = cl_reqs - other_reqs
+
+ # We want a unique attribute with no default.
+ cl_fields = fields_dict(get_origin(cl) or cl)
+ for maybe_renamed_attr_name in uniq:
+ orig_name = back_map[maybe_renamed_attr_name]
+ if cl_fields[orig_name].default in (NOTHING, MISSING):
+ break
+ else:
+ if fallback is None:
+ fallback = cl
+ continue
+ raise TypeError(f"{cl} has no usable non-default attributes")
+ uniq_attrs_dict[maybe_renamed_attr_name] = cl
+
+ if fallback is None:
+
+ def dis_func(data: Mapping[Any, Any]) -> type[AttrsInstance] | None:
+ if not isinstance(data, Mapping):
+ raise ValueError("Only input mappings are supported")
+ for k, v in uniq_attrs_dict.items():
+ if k in data:
+ return v
+ raise ValueError("Couldn't disambiguate")
+
+ else:
+
+ def dis_func(data: Mapping[Any, Any]) -> type[AttrsInstance] | None:
+ if not isinstance(data, Mapping):
+ raise ValueError("Only input mappings are supported")
+ for k, v in uniq_attrs_dict.items():
+ if k in data:
+ return v
+ return fallback
+
+ return dis_func
+
+
+create_uniq_field_dis_func = create_default_dis_func
+
+
+def _overriden_name(at: Attribute, override: AttributeOverride | None) -> str:
+ if override is None or override.rename is None:
+ return at.name
+ return override.rename
+
+
+def _usable_attribute_names(
+ cl: type[Any], overrides: dict[str, AttributeOverride]
+) -> tuple[set[str], dict[str, str]]:
+ """Return renamed fields and a mapping to original field names."""
+ res = set()
+ mapping = {}
+
+ for at in adapted_fields(get_origin(cl) or cl):
+ res.add(n := _overriden_name(at, overrides.get(at.name)))
+ mapping[n] = at.name
+
+ return res, mapping
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/dispatch.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/dispatch.py
new file mode 100644
index 0000000..3d746db
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/dispatch.py
@@ -0,0 +1,194 @@
+from __future__ import annotations
+
+from functools import lru_cache, singledispatch
+from typing import TYPE_CHECKING, Any, Callable, Generic, Literal, TypeVar
+
+from attrs import Factory, define
+
+from ._compat import TypeAlias
+from .fns import Predicate
+
+if TYPE_CHECKING:
+ from .converters import BaseConverter
+
+TargetType: TypeAlias = Any
+UnstructuredValue: TypeAlias = Any
+StructuredValue: TypeAlias = Any
+
+StructureHook: TypeAlias = Callable[[UnstructuredValue, TargetType], StructuredValue]
+UnstructureHook: TypeAlias = Callable[[StructuredValue], UnstructuredValue]
+
+Hook = TypeVar("Hook", StructureHook, UnstructureHook)
+HookFactory: TypeAlias = Callable[[TargetType], Hook]
+
+
+@define
+class _DispatchNotFound:
+ """A dummy object to help signify a dispatch not found."""
+
+
+@define
+class FunctionDispatch:
+ """
+    FunctionDispatch is similar to functools.singledispatch, but
+    instead dispatches based on predicate functions that take the type of
+    the first argument and return True or False.
+
+    Objects that help determine dispatch should be instantiated objects.
+
+ :param converter: A converter to be used for factories that require converters.
+
+ .. versionchanged:: 24.1.0
+ Support for factories that require converters, hence this requires a
+ converter when creating.
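+
+    A minimal, hedged sketch (the converter argument is only consulted by
+    extended factories, so ``None`` stands in here purely for illustration)::
+
+        >>> fd = FunctionDispatch(None)
+        >>> fd.register(lambda t: t is int, lambda v: v * 2)
+        >>> fd.dispatch(int)(21)
+        42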
+ """
+
+ _converter: BaseConverter
+ _handler_pairs: list[tuple[Predicate, Callable[[Any, Any], Any], bool, bool]] = (
+ Factory(list)
+ )
+
+ def register(
+ self,
+ predicate: Predicate,
+ func: Callable[..., Any],
+ is_generator=False,
+ takes_converter=False,
+ ) -> None:
+ self._handler_pairs.insert(0, (predicate, func, is_generator, takes_converter))
+
+ def dispatch(self, typ: Any) -> Callable[..., Any] | None:
+ """
+ Return the appropriate handler for the object passed.
+ """
+ for can_handle, handler, is_generator, takes_converter in self._handler_pairs:
+            # `can_handle` could raise an exception here, e.g. `issubclass`
+            # being called on an instance; it's easier to just ignore that
+            # case.
+ try:
+ ch = can_handle(typ)
+ except Exception: # noqa: S112
+ continue
+ if ch:
+ if is_generator:
+ if takes_converter:
+ return handler(typ, self._converter)
+ return handler(typ)
+
+ return handler
+ return None
+
+ def get_num_fns(self) -> int:
+ return len(self._handler_pairs)
+
+    def copy_to(self, other: FunctionDispatch, skip: int = 0) -> None:
+        # `[:-skip]` would yield an empty list when skip == 0, so slice explicitly.
+        new_pairs = self._handler_pairs[: len(self._handler_pairs) - skip]
+        other._handler_pairs = new_pairs + other._handler_pairs
+
+
+@define(init=False)
+class MultiStrategyDispatch(Generic[Hook]):
+ """
+ MultiStrategyDispatch uses a combination of exact-match dispatch,
+ singledispatch, and FunctionDispatch.
+
+ :param converter: A converter to be used for factories that require converters.
+ :param fallback_factory: A hook factory to be called when a hook cannot be
+ produced.
+
+ .. versionchanged:: 23.2.0
+ Fallbacks are now factories.
+ .. versionchanged:: 24.1.0
+ Support for factories that require converters, hence this requires a
+ converter when creating.
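+
+    A hedged sketch (again passing ``None`` as the converter, for illustration
+    only; the fallback factory here returns an identity hook)::
+
+        >>> msd = MultiStrategyDispatch(lambda _: (lambda v: v), None)
+        >>> msd.register_cls_list([(int, lambda v: v + 1)])
+        >>> msd.dispatch(int)(1)
+        2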
+ """
+
+ _fallback_factory: HookFactory[Hook]
+ _converter: BaseConverter
+ _direct_dispatch: dict[TargetType, Hook]
+ _function_dispatch: FunctionDispatch
+ _single_dispatch: Any
+ dispatch: Callable[[TargetType, BaseConverter], Hook]
+
+ def __init__(
+ self, fallback_factory: HookFactory[Hook], converter: BaseConverter
+ ) -> None:
+ self._fallback_factory = fallback_factory
+ self._direct_dispatch = {}
+ self._function_dispatch = FunctionDispatch(converter)
+ self._single_dispatch = singledispatch(_DispatchNotFound)
+ self.dispatch = lru_cache(maxsize=None)(self.dispatch_without_caching)
+
+ def dispatch_without_caching(self, typ: TargetType) -> Hook:
+ """Dispatch on the type but without caching the result."""
+ try:
+ dispatch = self._single_dispatch.dispatch(typ)
+ if dispatch is not _DispatchNotFound:
+ return dispatch
+ except Exception: # noqa: S110
+ pass
+
+ direct_dispatch = self._direct_dispatch.get(typ)
+ if direct_dispatch is not None:
+ return direct_dispatch
+
+ res = self._function_dispatch.dispatch(typ)
+ return res if res is not None else self._fallback_factory(typ)
+
+ def register_cls_list(self, cls_and_handler, direct: bool = False) -> None:
+ """Register a class to direct or singledispatch."""
+ for cls, handler in cls_and_handler:
+ if direct:
+ self._direct_dispatch[cls] = handler
+ else:
+ self._single_dispatch.register(cls, handler)
+ self.clear_direct()
+ self.dispatch.cache_clear()
+
+ def register_func_list(
+ self,
+ pred_and_handler: list[
+ tuple[Predicate, Any]
+ | tuple[Predicate, Any, bool]
+ | tuple[Predicate, Callable[[Any, BaseConverter], Any], Literal["extended"]]
+ ],
+ ):
+ """
+ Register a predicate function to determine if the handler
+ should be used for the type.
+
+ :param pred_and_handler: The list of predicates and their associated
+ handlers. If a handler is registered in `extended` mode, it's a
+ factory that requires a converter.
+ """
+ for tup in pred_and_handler:
+ if len(tup) == 2:
+ func, handler = tup
+ self._function_dispatch.register(func, handler)
+ else:
+ func, handler, is_gen = tup
+ if is_gen == "extended":
+ self._function_dispatch.register(
+ func, handler, is_generator=is_gen, takes_converter=True
+ )
+ else:
+ self._function_dispatch.register(func, handler, is_generator=is_gen)
+ self.clear_direct()
+ self.dispatch.cache_clear()
+
+ def clear_direct(self) -> None:
+ """Clear the direct dispatch."""
+ self._direct_dispatch.clear()
+
+ def clear_cache(self) -> None:
+ """Clear all caches."""
+ self._direct_dispatch.clear()
+ self.dispatch.cache_clear()
+
+ def get_num_fns(self) -> int:
+ return self._function_dispatch.get_num_fns()
+
+ def copy_to(self, other: MultiStrategyDispatch, skip: int = 0) -> None:
+ self._function_dispatch.copy_to(other._function_dispatch, skip=skip)
+ for cls, fn in self._single_dispatch.registry.items():
+ other._single_dispatch.register(cls, fn)
+ other.clear_cache()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/errors.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/errors.py
new file mode 100644
index 0000000..9148bf1
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/errors.py
@@ -0,0 +1,129 @@
+from typing import Any, List, Optional, Set, Tuple, Type, Union
+
+from cattrs._compat import ExceptionGroup
+
+
+class StructureHandlerNotFoundError(Exception):
+ """
+ Error raised when structuring cannot find a handler for converting inputs into
+ :attr:`type_`.
+ """
+
+ def __init__(self, message: str, type_: Type) -> None:
+ super().__init__(message)
+ self.type_ = type_
+
+
+class BaseValidationError(ExceptionGroup):
+ cl: Type
+
+ def __new__(cls, message, excs, cl: Type):
+ obj = super().__new__(cls, message, excs)
+ obj.cl = cl
+ return obj
+
+ def derive(self, excs):
+ return ClassValidationError(self.message, excs, self.cl)
+
+
+class IterableValidationNote(str):
+ """Attached as a note to an exception when an iterable element fails structuring."""
+
+ index: Union[int, str] # Ints for list indices, strs for dict keys
+ type: Any
+
+ def __new__(
+ cls, string: str, index: Union[int, str], type: Any
+ ) -> "IterableValidationNote":
+ instance = str.__new__(cls, string)
+ instance.index = index
+ instance.type = type
+ return instance
+
+ def __getnewargs__(self) -> Tuple[str, Union[int, str], Any]:
+ return (str(self), self.index, self.type)
+
+
+class IterableValidationError(BaseValidationError):
+ """Raised when structuring an iterable."""
+
+ def group_exceptions(
+ self,
+ ) -> Tuple[List[Tuple[Exception, IterableValidationNote]], List[Exception]]:
+ """Split the exceptions into two groups: with and without validation notes."""
+ excs_with_notes = []
+ other_excs = []
+ for subexc in self.exceptions:
+ if hasattr(subexc, "__notes__"):
+ for note in subexc.__notes__:
+ if note.__class__ is IterableValidationNote:
+ excs_with_notes.append((subexc, note))
+ break
+ else:
+ other_excs.append(subexc)
+ else:
+ other_excs.append(subexc)
+
+ return excs_with_notes, other_excs
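+
+    # A typical consumer pattern for this error type (hedged sketch;
+    # `converter` and `data` are illustrative stand-ins):
+    #
+    #     try:
+    #         converter.structure(data, List[int])
+    #     except IterableValidationError as exc:
+    #         with_notes, others = exc.group_exceptions()
+    #         for subexc, note in with_notes:
+    #             print(f"index {note.index}: {subexc}")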
+
+
+class AttributeValidationNote(str):
+ """Attached as a note to an exception when an attribute fails structuring."""
+
+ name: str
+ type: Any
+
+ def __new__(cls, string: str, name: str, type: Any) -> "AttributeValidationNote":
+ instance = str.__new__(cls, string)
+ instance.name = name
+ instance.type = type
+ return instance
+
+ def __getnewargs__(self) -> Tuple[str, str, Any]:
+ return (str(self), self.name, self.type)
+
+
+class ClassValidationError(BaseValidationError):
+ """Raised when validating a class if any attributes are invalid."""
+
+ def group_exceptions(
+ self,
+ ) -> Tuple[List[Tuple[Exception, AttributeValidationNote]], List[Exception]]:
+ """Split the exceptions into two groups: with and without validation notes."""
+ excs_with_notes = []
+ other_excs = []
+ for subexc in self.exceptions:
+ if hasattr(subexc, "__notes__"):
+ for note in subexc.__notes__:
+ if note.__class__ is AttributeValidationNote:
+ excs_with_notes.append((subexc, note))
+ break
+ else:
+ other_excs.append(subexc)
+ else:
+ other_excs.append(subexc)
+
+ return excs_with_notes, other_excs
+
+
+class ForbiddenExtraKeysError(Exception):
+ """
+ Raised when `forbid_extra_keys` is activated and such extra keys are detected
+ during structuring.
+
+ The attribute `extra_fields` is a sequence of those extra keys, which were the
+ cause of this error, and `cl` is the class which was structured with those extra
+ keys.
+ """
+
+ def __init__(
+ self, message: Optional[str], cl: Type, extra_fields: Set[str]
+ ) -> None:
+ self.cl = cl
+ self.extra_fields = extra_fields
+ cln = cl.__name__
+
+ super().__init__(
+ message
+ or f"Extra fields in constructor for {cln}: {', '.join(extra_fields)}"
+ )
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/fns.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/fns.py
new file mode 100644
index 0000000..748cfb3
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/fns.py
@@ -0,0 +1,22 @@
+"""Useful internal functions."""
+
+from typing import Any, Callable, NoReturn, Type, TypeVar
+
+from ._compat import TypeAlias
+from .errors import StructureHandlerNotFoundError
+
+T = TypeVar("T")
+
+Predicate: TypeAlias = Callable[[Any], bool]
+"""A predicate function determines if a type can be handled."""
+
+
+def identity(obj: T) -> T:
+ """The identity function."""
+ return obj
+
+
+def raise_error(_, cl: Type) -> NoReturn:
+ """At the bottom of the condition stack, we explode if we can't handle it."""
+ msg = f"Unsupported type: {cl!r}. Register a structure hook for it."
+ raise StructureHandlerNotFoundError(msg, type_=cl)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/__init__.py
new file mode 100644
index 0000000..97d2876
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/__init__.py
@@ -0,0 +1,1053 @@
+from __future__ import annotations
+
+import re
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Final,
+ Iterable,
+ Literal,
+ Mapping,
+ Tuple,
+ TypeVar,
+)
+
+from attrs import NOTHING, Attribute, Factory, resolve_types
+
+from .._compat import (
+ ANIES,
+ TypeAlias,
+ adapted_fields,
+ get_args,
+ get_origin,
+ is_annotated,
+ is_bare,
+ is_bare_final,
+ is_generic,
+)
+from .._generics import deep_copy_with
+from ..dispatch import UnstructureHook
+from ..errors import (
+ AttributeValidationNote,
+ ClassValidationError,
+ ForbiddenExtraKeysError,
+ IterableValidationError,
+ IterableValidationNote,
+ StructureHandlerNotFoundError,
+)
+from ..fns import identity
+from ._consts import AttributeOverride, already_generating, neutral
+from ._generics import generate_mapping
+from ._lc import generate_unique_filename
+from ._shared import find_structure_handler
+
+if TYPE_CHECKING:
+ from ..converters import BaseConverter
+
+__all__ = [
+ "make_dict_unstructure_fn",
+ "make_dict_structure_fn",
+ "make_iterable_unstructure_fn",
+ "make_hetero_tuple_unstructure_fn",
+ "make_mapping_unstructure_fn",
+ "make_mapping_structure_fn",
+ "make_dict_unstructure_fn_from_attrs",
+ "make_dict_structure_fn_from_attrs",
+]
+
+
+def override(
+ omit_if_default: bool | None = None,
+ rename: str | None = None,
+ omit: bool | None = None,
+ struct_hook: Callable[[Any, Any], Any] | None = None,
+ unstruct_hook: Callable[[Any], Any] | None = None,
+) -> AttributeOverride:
+ """Override how a particular field is handled.
+
+ :param omit: Whether to skip the field or not. `None` means apply default handling.
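+
+    A hedged usage sketch (``MyClass``, ``my_field`` and ``converter`` are
+    illustrative stand-ins)::
+
+        fn = make_dict_unstructure_fn(
+            MyClass, converter, my_field=override(rename="myField")
+        )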
+ """
+ return AttributeOverride(omit_if_default, rename, omit, struct_hook, unstruct_hook)
+
+
+T = TypeVar("T")
+
+
+def make_dict_unstructure_fn_from_attrs(
+ attrs: list[Attribute],
+ cl: type,
+ converter: BaseConverter,
+ typevar_map: dict[str, Any] = {},
+ _cattrs_omit_if_default: bool = False,
+ _cattrs_use_linecache: bool = True,
+ _cattrs_use_alias: bool = False,
+ _cattrs_include_init_false: bool = False,
+ **kwargs: AttributeOverride,
+) -> Callable[[T], dict[str, Any]]:
+ """
+ Generate a specialized dict unstructuring function for a list of attributes.
+
+ Usually used as a building block by more specialized hook factories.
+
+ Any provided overrides are attached to the generated function under the
+ `overrides` attribute.
+
+ :param cl: The class for which the function is generated; used mostly for its name,
+ module name and qualname.
+ :param _cattrs_omit_if_default: if true, attributes equal to their default values
+ will be omitted in the result dictionary.
+ :param _cattrs_use_alias: If true, the attribute alias will be used as the
+ dictionary key by default.
+ :param _cattrs_include_init_false: If true, _attrs_ fields marked as `init=False`
+ will be included.
+
+ .. versionadded:: 24.1.0
+ """
+
+ fn_name = "unstructure_" + cl.__name__
+ globs = {}
+ lines = []
+ invocation_lines = []
+ internal_arg_parts = {}
+
+ for a in attrs:
+ attr_name = a.name
+ override = kwargs.get(attr_name, neutral)
+ if override.omit:
+ continue
+ if override.omit is None and not a.init and not _cattrs_include_init_false:
+ continue
+ if override.rename is None:
+ kn = attr_name if not _cattrs_use_alias else a.alias
+ else:
+ kn = override.rename
+ d = a.default
+
+ # For each attribute, we try resolving the type here and now.
+ # If a type is manually overwritten, this function should be
+ # regenerated.
+ handler = None
+ if override.unstruct_hook is not None:
+ handler = override.unstruct_hook
+ else:
+ if a.type is not None:
+ t = a.type
+ if isinstance(t, TypeVar):
+ if t.__name__ in typevar_map:
+ t = typevar_map[t.__name__]
+ else:
+ handler = converter.unstructure
+ elif is_generic(t) and not is_bare(t) and not is_annotated(t):
+ t = deep_copy_with(t, typevar_map)
+
+ if handler is None:
+ if (
+ is_bare_final(t)
+ and a.default is not NOTHING
+ and not isinstance(a.default, Factory)
+ ):
+ # This is a special case where we can use the
+ # type of the default to dispatch on.
+ t = a.default.__class__
+ try:
+ handler = converter.get_unstructure_hook(t, cache_result=False)
+ except RecursionError:
+ # There's a circular reference somewhere down the line
+ handler = converter.unstructure
+ else:
+ handler = converter.unstructure
+
+ is_identity = handler == identity
+
+ if not is_identity:
+ unstruct_handler_name = f"__c_unstr_{attr_name}"
+ globs[unstruct_handler_name] = handler
+ internal_arg_parts[unstruct_handler_name] = handler
+ invoke = f"{unstruct_handler_name}(instance.{attr_name})"
+ else:
+ invoke = f"instance.{attr_name}"
+
+ if d is not NOTHING and (
+ (_cattrs_omit_if_default and override.omit_if_default is not False)
+ or override.omit_if_default
+ ):
+ def_name = f"__c_def_{attr_name}"
+
+ if isinstance(d, Factory):
+ globs[def_name] = d.factory
+ internal_arg_parts[def_name] = d.factory
+ if d.takes_self:
+ lines.append(f" if instance.{attr_name} != {def_name}(instance):")
+ else:
+ lines.append(f" if instance.{attr_name} != {def_name}():")
+ lines.append(f" res['{kn}'] = {invoke}")
+ else:
+ globs[def_name] = d
+ internal_arg_parts[def_name] = d
+ lines.append(f" if instance.{attr_name} != {def_name}:")
+ lines.append(f" res['{kn}'] = {invoke}")
+
+ else:
+ # No default or no override.
+ invocation_lines.append(f"'{kn}': {invoke},")
+
+ internal_arg_line = ", ".join([f"{i}={i}" for i in internal_arg_parts])
+ if internal_arg_line:
+ internal_arg_line = f", {internal_arg_line}"
+ for k, v in internal_arg_parts.items():
+ globs[k] = v
+
+ total_lines = (
+ [f"def {fn_name}(instance{internal_arg_line}):"]
+ + [" res = {"]
+ + [f" {line}" for line in invocation_lines]
+ + [" }"]
+ + lines
+ + [" return res"]
+ )
+ script = "\n".join(total_lines)
+ fname = generate_unique_filename(
+ cl, "unstructure", lines=total_lines if _cattrs_use_linecache else []
+ )
+
+ eval(compile(script, fname, "exec"), globs)
+
+ res = globs[fn_name]
+ res.overrides = kwargs
+
+ return res
+
+
+def make_dict_unstructure_fn(
+ cl: type[T],
+ converter: BaseConverter,
+ _cattrs_omit_if_default: bool = False,
+ _cattrs_use_linecache: bool = True,
+ _cattrs_use_alias: bool = False,
+ _cattrs_include_init_false: bool = False,
+ **kwargs: AttributeOverride,
+) -> Callable[[T], dict[str, Any]]:
+ """
+ Generate a specialized dict unstructuring function for an attrs class or a
+ dataclass.
+
+ Any provided overrides are attached to the generated function under the
+ `overrides` attribute.
+
+ :param _cattrs_omit_if_default: if true, attributes equal to their default values
+ will be omitted in the result dictionary.
+ :param _cattrs_use_alias: If true, the attribute alias will be used as the
+ dictionary key by default.
+ :param _cattrs_include_init_false: If true, _attrs_ fields marked as `init=False`
+ will be included.
+
+ .. versionadded:: 23.2.0 *_cattrs_use_alias*
+ .. versionadded:: 23.2.0 *_cattrs_include_init_false*
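+
+    A hedged sketch (``Model`` is a hypothetical attrs class and ``conv`` an
+    existing converter; the generated hook is then registered on it)::
+
+        fn = make_dict_unstructure_fn(Model, conv, _cattrs_omit_if_default=True)
+        conv.register_unstructure_hook(Model, fn)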
+ """
+ origin = get_origin(cl)
+ attrs = adapted_fields(origin or cl) # type: ignore
+
+ if any(isinstance(a.type, str) for a in attrs):
+ # PEP 563 annotations - need to be resolved.
+ resolve_types(cl)
+
+ mapping = {}
+ if is_generic(cl):
+ mapping = generate_mapping(cl, mapping)
+
+ for base in getattr(origin, "__orig_bases__", ()):
+ if is_generic(base) and not str(base).startswith("typing.Generic"):
+ mapping = generate_mapping(base, mapping)
+ break
+ if origin is not None:
+ cl = origin
+
+ # We keep track of what we're generating to help with recursive
+ # class graphs.
+ try:
+ working_set = already_generating.working_set
+ except AttributeError:
+ working_set = set()
+ already_generating.working_set = working_set
+ if cl in working_set:
+ raise RecursionError()
+
+ working_set.add(cl)
+
+ try:
+ return make_dict_unstructure_fn_from_attrs(
+ attrs,
+ cl,
+ converter,
+ mapping,
+ _cattrs_omit_if_default=_cattrs_omit_if_default,
+ _cattrs_use_linecache=_cattrs_use_linecache,
+ _cattrs_use_alias=_cattrs_use_alias,
+ _cattrs_include_init_false=_cattrs_include_init_false,
+ **kwargs,
+ )
+ finally:
+ working_set.remove(cl)
+ if not working_set:
+ del already_generating.working_set
+
+
+DictStructureFn = Callable[[Mapping[str, Any], Any], T]
+
+
+def make_dict_structure_fn_from_attrs(
+ attrs: list[Attribute],
+ cl: type,
+ converter: BaseConverter,
+ typevar_map: dict[str, Any] = {},
+ _cattrs_forbid_extra_keys: bool | Literal["from_converter"] = "from_converter",
+ _cattrs_use_linecache: bool = True,
+ _cattrs_prefer_attrib_converters: (
+ bool | Literal["from_converter"]
+ ) = "from_converter",
+ _cattrs_detailed_validation: bool | Literal["from_converter"] = "from_converter",
+ _cattrs_use_alias: bool = False,
+ _cattrs_include_init_false: bool = False,
+ **kwargs: AttributeOverride,
+) -> DictStructureFn[T]:
+ """
+ Generate a specialized dict structuring function for a list of attributes.
+
+ Usually used as a building block by more specialized hook factories.
+
+ Any provided overrides are attached to the generated function under the
+ `overrides` attribute.
+
+ :param _cattrs_forbid_extra_keys: Whether the structuring function should raise a
+ `ForbiddenExtraKeysError` if unknown keys are encountered.
+ :param _cattrs_use_linecache: Whether to store the source code in the Python
+ linecache.
+ :param _cattrs_prefer_attrib_converters: If an _attrs_ converter is present on a
+ field, use it instead of processing the field normally.
+ :param _cattrs_detailed_validation: Whether to use a slower mode that produces
+ more detailed errors.
+ :param _cattrs_use_alias: If true, the attribute alias will be used as the
+ dictionary key by default.
+ :param _cattrs_include_init_false: If true, _attrs_ fields marked as `init=False`
+ will be included.
+
+ .. versionadded:: 24.1.0
+ """
+
+ cl_name = cl.__name__
+ fn_name = "structure_" + cl_name
+
+    # If the class has generic parameters, fold them into a unique function name.
+ for p in getattr(cl, "__parameters__", ()):
+ # This is nasty, I am not sure how best to handle `typing.List[str]` or
+ # `TClass[int, int]` as a parameter type here
+ try:
+ name_base = typevar_map[p.__name__]
+ except KeyError:
+ pn = p.__name__
+ raise StructureHandlerNotFoundError(
+ f"Missing type for generic argument {pn}, specify it when structuring.",
+ p,
+ ) from None
+ name = getattr(name_base, "__name__", None) or str(name_base)
+ # `<>` can be present in lambdas
+ # `|` can be present in unions
+ name = re.sub(r"[\[\.\] ,<>]", "_", name)
+ name = re.sub(r"\|", "u", name)
+ fn_name += f"_{name}"
+
+ internal_arg_parts = {"__cl": cl}
+ globs = {}
+ lines = []
+ post_lines = []
+ pi_lines = [] # post instantiation lines
+ invocation_lines = []
+
+ allowed_fields = set()
+ if _cattrs_forbid_extra_keys == "from_converter":
+        # BaseConverter doesn't define this attribute, so fall back carefully.
+ _cattrs_forbid_extra_keys = getattr(converter, "forbid_extra_keys", False)
+ if _cattrs_detailed_validation == "from_converter":
+ _cattrs_detailed_validation = converter.detailed_validation
+ if _cattrs_prefer_attrib_converters == "from_converter":
+ _cattrs_prefer_attrib_converters = converter._prefer_attrib_converters
+
+ if _cattrs_forbid_extra_keys:
+ globs["__c_a"] = allowed_fields
+ globs["__c_feke"] = ForbiddenExtraKeysError
+
+ if _cattrs_detailed_validation:
+ lines.append(" res = {}")
+ lines.append(" errors = []")
+ invocation_lines.append("**res,")
+ internal_arg_parts["__c_cve"] = ClassValidationError
+ internal_arg_parts["__c_avn"] = AttributeValidationNote
+ for a in attrs:
+ an = a.name
+ override = kwargs.get(an, neutral)
+ if override.omit:
+ continue
+ if override.omit is None and not a.init and not _cattrs_include_init_false:
+ continue
+ t = a.type
+ if isinstance(t, TypeVar):
+ t = typevar_map.get(t.__name__, t)
+ elif is_generic(t) and not is_bare(t) and not is_annotated(t):
+ t = deep_copy_with(t, typevar_map)
+
+ # For each attribute, we try resolving the type here and now.
+ # If a type is manually overwritten, this function should be
+ # regenerated.
+ if override.struct_hook is not None:
+ # If the user has requested an override, just use that.
+ handler = override.struct_hook
+ else:
+ handler = find_structure_handler(
+ a, t, converter, _cattrs_prefer_attrib_converters
+ )
+
+ struct_handler_name = f"__c_structure_{an}"
+ if handler is not None:
+ internal_arg_parts[struct_handler_name] = handler
+
+ ian = a.alias
+ if override.rename is None:
+ kn = an if not _cattrs_use_alias else a.alias
+ else:
+ kn = override.rename
+
+ allowed_fields.add(kn)
+ i = " "
+
+ if not a.init:
+ if a.default is not NOTHING:
+ pi_lines.append(f"{i}if '{kn}' in o:")
+ i = f"{i} "
+ pi_lines.append(f"{i}try:")
+ i = f"{i} "
+ type_name = f"__c_type_{an}"
+ internal_arg_parts[type_name] = t
+ if handler is not None:
+ if handler == converter._structure_call:
+ internal_arg_parts[struct_handler_name] = t
+ pi_lines.append(
+ f"{i}instance.{an} = {struct_handler_name}(o['{kn}'])"
+ )
+ else:
+ tn = f"__c_type_{an}"
+ internal_arg_parts[tn] = t
+ pi_lines.append(
+ f"{i}instance.{an} = {struct_handler_name}(o['{kn}'], {tn})"
+ )
+ else:
+ pi_lines.append(f"{i}instance.{an} = o['{kn}']")
+ i = i[:-2]
+ pi_lines.append(f"{i}except Exception as e:")
+ i = f"{i} "
+ pi_lines.append(
+ f'{i}e.__notes__ = getattr(e, \'__notes__\', []) + [__c_avn("Structuring class {cl.__qualname__} @ attribute {an}", "{an}", __c_type_{an})]'
+ )
+ pi_lines.append(f"{i}errors.append(e)")
+
+ else:
+ if a.default is not NOTHING:
+ lines.append(f"{i}if '{kn}' in o:")
+ i = f"{i} "
+ lines.append(f"{i}try:")
+ i = f"{i} "
+ type_name = f"__c_type_{an}"
+ internal_arg_parts[type_name] = t
+ if handler:
+ if handler == converter._structure_call:
+ internal_arg_parts[struct_handler_name] = t
+ lines.append(
+ f"{i}res['{ian}'] = {struct_handler_name}(o['{kn}'])"
+ )
+ else:
+ tn = f"__c_type_{an}"
+ internal_arg_parts[tn] = t
+ lines.append(
+ f"{i}res['{ian}'] = {struct_handler_name}(o['{kn}'], {tn})"
+ )
+ else:
+ lines.append(f"{i}res['{ian}'] = o['{kn}']")
+ i = i[:-2]
+ lines.append(f"{i}except Exception as e:")
+ i = f"{i} "
+ lines.append(
+ f'{i}e.__notes__ = getattr(e, \'__notes__\', []) + [__c_avn("Structuring class {cl.__qualname__} @ attribute {an}", "{an}", __c_type_{an})]'
+ )
+ lines.append(f"{i}errors.append(e)")
+
+ if _cattrs_forbid_extra_keys:
+ post_lines += [
+ " unknown_fields = set(o.keys()) - __c_a",
+ " if unknown_fields:",
+ " errors.append(__c_feke('', __cl, unknown_fields))",
+ ]
+
+ post_lines.append(
+ f" if errors: raise __c_cve('While structuring ' + {cl_name!r}, errors, __cl)"
+ )
+ if not pi_lines:
+ instantiation_lines = (
+ [" try:"]
+ + [" return __cl("]
+ + [f" {line}" for line in invocation_lines]
+ + [" )"]
+ + [
+ f" except Exception as exc: raise __c_cve('While structuring ' + {cl_name!r}, [exc], __cl)"
+ ]
+ )
+ else:
+ instantiation_lines = (
+ [" try:"]
+ + [" instance = __cl("]
+ + [f" {line}" for line in invocation_lines]
+ + [" )"]
+ + [
+ f" except Exception as exc: raise __c_cve('While structuring ' + {cl_name!r}, [exc], __cl)"
+ ]
+ )
+ pi_lines.append(" return instance")
+ else:
+ non_required = []
+ # The first loop deals with required args.
+ for a in attrs:
+ an = a.name
+ override = kwargs.get(an, neutral)
+ if override.omit:
+ continue
+ if override.omit is None and not a.init and not _cattrs_include_init_false:
+ continue
+ if a.default is not NOTHING:
+ non_required.append(a)
+ continue
+ t = a.type
+ if isinstance(t, TypeVar):
+ t = typevar_map.get(t.__name__, t)
+ elif is_generic(t) and not is_bare(t) and not is_annotated(t):
+ t = deep_copy_with(t, typevar_map)
+
+ # For each attribute, we try resolving the type here and now.
+ # If a type is manually overwritten, this function should be
+ # regenerated.
+ if override.struct_hook is not None:
+ # If the user has requested an override, just use that.
+ handler = override.struct_hook
+ else:
+ handler = find_structure_handler(
+ a, t, converter, _cattrs_prefer_attrib_converters
+ )
+
+ if override.rename is None:
+ kn = an if not _cattrs_use_alias else a.alias
+ else:
+ kn = override.rename
+ allowed_fields.add(kn)
+
+ if not a.init:
+ if handler is not None:
+ struct_handler_name = f"__c_structure_{an}"
+ internal_arg_parts[struct_handler_name] = handler
+ if handler == converter._structure_call:
+ internal_arg_parts[struct_handler_name] = t
+ pi_line = f" instance.{an} = {struct_handler_name}(o['{kn}'])"
+ else:
+ tn = f"__c_type_{an}"
+ internal_arg_parts[tn] = t
+ pi_line = (
+ f" instance.{an} = {struct_handler_name}(o['{kn}'], {tn})"
+ )
+ else:
+ pi_line = f" instance.{an} = o['{kn}']"
+
+ pi_lines.append(pi_line)
+ else:
+ if handler:
+ struct_handler_name = f"__c_structure_{an}"
+ internal_arg_parts[struct_handler_name] = handler
+ if handler == converter._structure_call:
+ internal_arg_parts[struct_handler_name] = t
+ invocation_line = f"{struct_handler_name}(o['{kn}']),"
+ else:
+ tn = f"__c_type_{an}"
+ internal_arg_parts[tn] = t
+ invocation_line = f"{struct_handler_name}(o['{kn}'], {tn}),"
+ else:
+ invocation_line = f"o['{kn}'],"
+
+ if a.kw_only:
+ invocation_line = f"{a.alias}={invocation_line}"
+ invocation_lines.append(invocation_line)
+
+ # The second loop is for optional args.
+ if non_required:
+ invocation_lines.append("**res,")
+ lines.append(" res = {}")
+
+ for a in non_required:
+ an = a.name
+ override = kwargs.get(an, neutral)
+ t = a.type
+ if isinstance(t, TypeVar):
+ t = typevar_map.get(t.__name__, t)
+ elif is_generic(t) and not is_bare(t) and not is_annotated(t):
+ t = deep_copy_with(t, typevar_map)
+
+ # For each attribute, we try resolving the type here and now.
+ # If a type is manually overwritten, this function should be
+ # regenerated.
+ if override.struct_hook is not None:
+ # If the user has requested an override, just use that.
+ handler = override.struct_hook
+ else:
+ handler = find_structure_handler(
+ a, t, converter, _cattrs_prefer_attrib_converters
+ )
+
+ struct_handler_name = f"__c_structure_{an}"
+ internal_arg_parts[struct_handler_name] = handler
+
+ if override.rename is None:
+ kn = an if not _cattrs_use_alias else a.alias
+ else:
+ kn = override.rename
+ allowed_fields.add(kn)
+ if not a.init:
+ pi_lines.append(f" if '{kn}' in o:")
+ if handler:
+ if handler == converter._structure_call:
+ internal_arg_parts[struct_handler_name] = t
+ pi_lines.append(
+ f" instance.{an} = {struct_handler_name}(o['{kn}'])"
+ )
+ else:
+ tn = f"__c_type_{an}"
+ internal_arg_parts[tn] = t
+ pi_lines.append(
+ f" instance.{an} = {struct_handler_name}(o['{kn}'], {tn})"
+ )
+ else:
+ pi_lines.append(f" instance.{an} = o['{kn}']")
+ else:
+ post_lines.append(f" if '{kn}' in o:")
+ if handler:
+ if handler == converter._structure_call:
+ internal_arg_parts[struct_handler_name] = t
+ post_lines.append(
+ f" res['{a.alias}'] = {struct_handler_name}(o['{kn}'])"
+ )
+ else:
+ tn = f"__c_type_{an}"
+ internal_arg_parts[tn] = t
+ post_lines.append(
+ f" res['{a.alias}'] = {struct_handler_name}(o['{kn}'], {tn})"
+ )
+ else:
+ post_lines.append(f" res['{a.alias}'] = o['{kn}']")
+ if not pi_lines:
+ instantiation_lines = (
+ [" return __cl("]
+ + [f" {line}" for line in invocation_lines]
+ + [" )"]
+ )
+ else:
+ instantiation_lines = (
+ [" instance = __cl("]
+ + [f" {line}" for line in invocation_lines]
+ + [" )"]
+ )
+ pi_lines.append(" return instance")
+
+ if _cattrs_forbid_extra_keys:
+ post_lines += [
+ " unknown_fields = set(o.keys()) - __c_a",
+ " if unknown_fields:",
+ " raise __c_feke('', __cl, unknown_fields)",
+ ]
+
+ # At the end, we create the function header.
+ internal_arg_line = ", ".join([f"{i}={i}" for i in internal_arg_parts])
+ for k, v in internal_arg_parts.items():
+ globs[k] = v
+
+ total_lines = [
+ f"def {fn_name}(o, _, {internal_arg_line}):",
+ *lines,
+ *post_lines,
+ *instantiation_lines,
+ *pi_lines,
+ ]
+
+ script = "\n".join(total_lines)
+ fname = generate_unique_filename(
+ cl, "structure", lines=total_lines if _cattrs_use_linecache else []
+ )
+
+ eval(compile(script, fname, "exec"), globs)
+
+ res = globs[fn_name]
+ res.overrides = kwargs
+
+ return res
+
+
+def make_dict_structure_fn(
+ cl: type[T],
+ converter: BaseConverter,
+ _cattrs_forbid_extra_keys: bool | Literal["from_converter"] = "from_converter",
+ _cattrs_use_linecache: bool = True,
+ _cattrs_prefer_attrib_converters: (
+ bool | Literal["from_converter"]
+ ) = "from_converter",
+ _cattrs_detailed_validation: bool | Literal["from_converter"] = "from_converter",
+ _cattrs_use_alias: bool = False,
+ _cattrs_include_init_false: bool = False,
+ **kwargs: AttributeOverride,
+) -> DictStructureFn[T]:
+ """
+ Generate a specialized dict structuring function for an attrs class or
+ dataclass.
+
+ Any provided overrides are attached to the generated function under the
+ `overrides` attribute.
+
+ :param _cattrs_forbid_extra_keys: Whether the structuring function should raise a
+ `ForbiddenExtraKeysError` if unknown keys are encountered.
+ :param _cattrs_use_linecache: Whether to store the source code in the Python
+ linecache.
+ :param _cattrs_prefer_attrib_converters: If an _attrs_ converter is present on a
+ field, use it instead of processing the field normally.
+ :param _cattrs_detailed_validation: Whether to use a slower mode that produces
+ more detailed errors.
+ :param _cattrs_use_alias: If true, the attribute alias will be used as the
+ dictionary key by default.
+ :param _cattrs_include_init_false: If true, _attrs_ fields marked as `init=False`
+ will be included.
+
+ .. versionadded:: 23.2.0 *_cattrs_use_alias*
+ .. versionadded:: 23.2.0 *_cattrs_include_init_false*
+ .. versionchanged:: 23.2.0
+ The `_cattrs_forbid_extra_keys` and `_cattrs_detailed_validation` parameters
+ take their values from the given converter by default.
+ .. versionchanged:: 24.1.0
+ The `_cattrs_prefer_attrib_converters` parameter takes its value from the given
+ converter by default.
+ """
+
+ mapping = {}
+ if is_generic(cl):
+ base = get_origin(cl)
+ mapping = generate_mapping(cl, mapping)
+ if base is not None:
+ cl = base
+
+ for base in getattr(cl, "__orig_bases__", ()):
+ if is_generic(base) and not str(base).startswith("typing.Generic"):
+ mapping = generate_mapping(base, mapping)
+ break
+
+ attrs = adapted_fields(cl)
+
+ if any(isinstance(a.type, str) for a in attrs):
+ # PEP 563 annotations - need to be resolved.
+ resolve_types(cl)
+
+ # We keep track of what we're generating to help with recursive
+ # class graphs.
+ try:
+ working_set = already_generating.working_set
+ except AttributeError:
+ working_set = set()
+ already_generating.working_set = working_set
+ else:
+ if cl in working_set:
+ raise RecursionError()
+
+ working_set.add(cl)
+
+ try:
+ return make_dict_structure_fn_from_attrs(
+ attrs,
+ cl,
+ converter,
+ mapping,
+ _cattrs_forbid_extra_keys=_cattrs_forbid_extra_keys,
+ _cattrs_use_linecache=_cattrs_use_linecache,
+ _cattrs_prefer_attrib_converters=_cattrs_prefer_attrib_converters,
+ _cattrs_detailed_validation=_cattrs_detailed_validation,
+ _cattrs_use_alias=_cattrs_use_alias,
+ _cattrs_include_init_false=_cattrs_include_init_false,
+ **kwargs,
+ )
+ finally:
+ working_set.remove(cl)
+ if not working_set:
+ del already_generating.working_set
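+
+# Illustrative usage sketch (editor's note, not part of the vendored module;
+# `User` is a hypothetical attrs class with fields `id: int` and `name: str`):
+#
+#     from cattrs import Converter
+#     from cattrs.gen import make_dict_structure_fn, override
+#
+#     c = Converter()
+#     hook = make_dict_structure_fn(User, c, name=override(rename="userName"))
+#     c.register_structure_hook(User, hook)
+#     c.structure({"id": 1, "userName": "Ana"}, User)  # -> User(id=1, name="Ana")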
+
+
+IterableUnstructureFn = Callable[[Iterable[Any]], Any]
+
+
+#: A type alias for heterogeneous tuple unstructure hooks.
+HeteroTupleUnstructureFn: TypeAlias = Callable[[Tuple[Any, ...]], Any]
+
+
+def make_hetero_tuple_unstructure_fn(
+ cl: Any,
+ converter: BaseConverter,
+ unstructure_to: Any = None,
+ type_args: tuple | None = None,
+) -> HeteroTupleUnstructureFn:
+    """Generate a specialized unstructure function for a heterogeneous tuple.
+
+ :param type_args: If provided, override the type arguments.
+ """
+ fn_name = "unstructure_tuple"
+
+ type_args = get_args(cl) if type_args is None else type_args
+
+ # We can do the dispatch here and now.
+ handlers = [converter.get_unstructure_hook(type_arg) for type_arg in type_args]
+
+ globs = {f"__cattr_u_{i}": h for i, h in enumerate(handlers)}
+ if unstructure_to is not tuple:
+ globs["__cattr_seq_cl"] = unstructure_to or cl
+ lines = []
+
+ lines.append(f"def {fn_name}(tup):")
+ if unstructure_to is not tuple:
+ lines.append(" res = __cattr_seq_cl((")
+ else:
+ lines.append(" res = (")
+ for i in range(len(handlers)):
+ if handlers[i] == identity:
+ lines.append(f" tup[{i}],")
+ else:
+ lines.append(f" __cattr_u_{i}(tup[{i}]),")
+
+ if unstructure_to is not tuple:
+ lines.append(" ))")
+ else:
+ lines.append(" )")
+
+ total_lines = [*lines, " return res"]
+
+ eval(compile("\n".join(total_lines), "", "exec"), globs)
+
+ return globs[fn_name]
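+
+# Illustrative sketch (editor's note): the generated source for a two-element
+# tuple where only the second element needs a real hook looks roughly like
+#
+#     def unstructure_tuple(tup):
+#         res = (
+#             tup[0],               # identity hook: element emitted as-is
+#             __cattr_u_1(tup[1]),  # dispatched hook for the element type
+#         )
+#         return res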
+
+
+MappingUnstructureFn = Callable[[Mapping[Any, Any]], Any]
+
+
+def make_mapping_unstructure_fn(
+ cl: Any,
+ converter: BaseConverter,
+ unstructure_to: Any = None,
+ key_handler: Callable[[Any, Any | None], Any] | None = None,
+) -> MappingUnstructureFn:
+ """Generate a specialized unstructure function for a mapping."""
+ kh = key_handler or converter.unstructure
+ val_handler = converter.unstructure
+
+ fn_name = "unstructure_mapping"
+
+ # Let's try fishing out the type args.
+ if getattr(cl, "__args__", None) is not None:
+ args = get_args(cl)
+ if len(args) == 2:
+ key_arg, val_arg = args
+ else:
+            # Probably a Counter; only the key type is parametrized.
+            key_arg, val_arg = args[0], Any
+ # We can do the dispatch here and now.
+ kh = key_handler or converter.get_unstructure_hook(key_arg, cache_result=False)
+ if kh == identity:
+ kh = None
+
+ val_handler = converter.get_unstructure_hook(val_arg, cache_result=False)
+ if val_handler == identity:
+ val_handler = None
+
+ globs = {
+ "__cattr_mapping_cl": unstructure_to or cl,
+ "__cattr_k_u": kh,
+ "__cattr_v_u": val_handler,
+ }
+
+ k_u = "__cattr_k_u(k)" if kh is not None else "k"
+ v_u = "__cattr_v_u(v)" if val_handler is not None else "v"
+
+ lines = []
+
+ lines.append(f"def {fn_name}(mapping):")
+ lines.append(
+ f" res = __cattr_mapping_cl(({k_u}, {v_u}) for k, v in mapping.items())"
+ )
+
+ total_lines = [*lines, " return res"]
+
+ eval(compile("\n".join(total_lines), "", "exec"), globs)
+
+ return globs[fn_name]
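+
+# Illustrative sketch (editor's note): with non-identity key and value hooks
+# the generated source is roughly
+#
+#     def unstructure_mapping(mapping):
+#         res = __cattr_mapping_cl(
+#             (__cattr_k_u(k), __cattr_v_u(v)) for k, v in mapping.items()
+#         )
+#         return res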
+
+
+MappingStructureFn = Callable[[Mapping[Any, Any], Any], T]
+
+
+# This factory is here for backwards compatibility and circular imports.
+def mapping_structure_factory(
+ cl: type[T],
+ converter: BaseConverter,
+ structure_to: type = dict,
+ key_type=NOTHING,
+ val_type=NOTHING,
+ detailed_validation: bool = True,
+) -> MappingStructureFn[T]:
+ """Generate a specialized structure function for a mapping."""
+ fn_name = "structure_mapping"
+
+ globs: dict[str, type] = {"__cattr_mapping_cl": structure_to}
+
+ lines = []
+ internal_arg_parts = {}
+
+ # Let's try fishing out the type args.
+ if not is_bare(cl):
+ args = get_args(cl)
+ if len(args) == 2:
+ key_arg_cand, val_arg_cand = args
+ if key_type is NOTHING:
+ key_type = key_arg_cand
+ if val_type is NOTHING:
+ val_type = val_arg_cand
+ else:
+ if key_type is not NOTHING and val_type is NOTHING:
+ (val_type,) = args
+ elif key_type is NOTHING and val_type is not NOTHING:
+ (key_type,) = args
+ else:
+ # Probably a Counter
+ (key_type,) = args
+ val_type = Any
+
+ is_bare_dict = val_type in ANIES and key_type in ANIES
+ if not is_bare_dict:
+ # We can do the dispatch here and now.
+ key_handler = converter.get_structure_hook(key_type, cache_result=False)
+ if key_handler == converter._structure_call:
+ key_handler = key_type
+
+ val_handler = converter.get_structure_hook(val_type, cache_result=False)
+ if val_handler == converter._structure_call:
+ val_handler = val_type
+
+ globs["__cattr_k_t"] = key_type
+ globs["__cattr_v_t"] = val_type
+ globs["__cattr_k_s"] = key_handler
+ globs["__cattr_v_s"] = val_handler
+ k_s = (
+ "__cattr_k_s(k, __cattr_k_t)"
+ if key_handler != key_type
+ else "__cattr_k_s(k)"
+ )
+ v_s = (
+ "__cattr_v_s(v, __cattr_v_t)"
+ if val_handler != val_type
+ else "__cattr_v_s(v)"
+ )
+ else:
+ is_bare_dict = True
+
+ if is_bare_dict:
+ # No args, it's a bare dict.
+ lines.append(" res = dict(mapping)")
+ else:
+ if detailed_validation:
+ internal_arg_parts["IterableValidationError"] = IterableValidationError
+ internal_arg_parts["IterableValidationNote"] = IterableValidationNote
+ internal_arg_parts["val_type"] = (
+ val_type if val_type is not NOTHING else Any
+ )
+ internal_arg_parts["key_type"] = (
+ key_type if key_type is not NOTHING else Any
+ )
+ globs["enumerate"] = enumerate
+
+ lines.append(" res = {}; errors = []")
+ lines.append(" for k, v in mapping.items():")
+ lines.append(" try:")
+ lines.append(f" value = {v_s}")
+ lines.append(" except Exception as e:")
+ lines.append(
+ " e.__notes__ = getattr(e, '__notes__', []) + [IterableValidationNote(f'Structuring mapping value @ key {k!r}', k, val_type)]"
+ )
+ lines.append(" errors.append(e)")
+ lines.append(" continue")
+ lines.append(" try:")
+ lines.append(f" key = {k_s}")
+ lines.append(" res[key] = value")
+ lines.append(" except Exception as e:")
+ lines.append(
+ " e.__notes__ = getattr(e, '__notes__', []) + [IterableValidationNote(f'Structuring mapping key @ key {k!r}', k, key_type)]"
+ )
+ lines.append(" errors.append(e)")
+ lines.append(" if errors:")
+ lines.append(
+ f" raise IterableValidationError('While structuring ' + {repr(cl)!r}, errors, __cattr_mapping_cl)"
+ )
+ else:
+ lines.append(f" res = {{{k_s}: {v_s} for k, v in mapping.items()}}")
+ if structure_to is not dict:
+ lines.append(" res = __cattr_mapping_cl(res)")
+
+ internal_arg_line = ", ".join([f"{i}={i}" for i in internal_arg_parts])
+ if internal_arg_line:
+ internal_arg_line = f", {internal_arg_line}"
+ for k, v in internal_arg_parts.items():
+ globs[k] = v
+
+ def_line = f"def {fn_name}(mapping, _{internal_arg_line}):"
+ total_lines = [def_line, *lines, " return res"]
+ script = "\n".join(total_lines)
+
+ eval(compile(script, "", "exec"), globs)
+
+ return globs[fn_name]
+
+
+make_mapping_structure_fn: Final = mapping_structure_factory
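+
+# Illustrative usage sketch (editor's note):
+#
+#     from cattrs import Converter
+#
+#     c = Converter()
+#     hook = make_mapping_structure_fn(dict[str, int], c)
+#     hook({"a": "1"}, dict[str, int])  # -> {"a": 1}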
+
+
+# This factory is here for backwards compatibility and circular imports.
+def iterable_unstructure_factory(
+ cl: Any, converter: BaseConverter, unstructure_to: Any = None
+) -> UnstructureHook:
+ """A hook factory for unstructuring iterables.
+
+ :param unstructure_to: Force unstructuring to this type, if provided.
+ """
+ handler = converter.unstructure
+
+ # Let's try fishing out the type args
+ # Unspecified tuples have `__args__` as empty tuples, so guard
+ # against IndexError.
+ if getattr(cl, "__args__", None) not in (None, ()):
+ type_arg = cl.__args__[0]
+ if isinstance(type_arg, TypeVar):
+ type_arg = getattr(type_arg, "__default__", Any)
+ handler = converter.get_unstructure_hook(type_arg, cache_result=False)
+ if handler == identity:
+ # Save ourselves the trouble of iterating over it all.
+ return unstructure_to or cl
+
+ def unstructure_iterable(iterable, _seq_cl=unstructure_to or cl, _hook=handler):
+ return _seq_cl(_hook(i) for i in iterable)
+
+ return unstructure_iterable
+
+
+make_iterable_unstructure_fn: Final = iterable_unstructure_factory
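+
+# Editor's note: for list[int] the element hook is `identity`, so the factory
+# short-circuits and returns the collection type itself (a generic alias such
+# as `list[int]` is callable and behaves like `list`); otherwise it returns
+# `unstructure_iterable`, which maps the dispatched hook over the elements.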
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_consts.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_consts.py
new file mode 100644
index 0000000..a6dcd03
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_consts.py
@@ -0,0 +1,19 @@
+from __future__ import annotations
+
+from threading import local
+from typing import Any, Callable
+
+from attrs import frozen
+
+
+@frozen
+class AttributeOverride:
+ omit_if_default: bool | None = None
+ rename: str | None = None
+ omit: bool | None = None # Omit the field completely.
+ struct_hook: Callable[[Any, Any], Any] | None = None # Structure hook to use.
+    unstruct_hook: Callable[[Any], Any] | None = None  # Unstructure hook to use.
+
+
+neutral = AttributeOverride()
+already_generating = local()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_generics.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_generics.py
new file mode 100644
index 0000000..069c48c
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_generics.py
@@ -0,0 +1,79 @@
+from __future__ import annotations
+
+from typing import TypeVar
+
+from .._compat import get_args, get_origin, is_generic
+
+
+def _tvar_has_default(tvar) -> bool:
+    """Does `tvar` have a default?
+
+    In CPython 3.13+ and typing_extensions>=4.12.0:
+    - TypeVars have a `has_default()` method for detecting
+      if a TypeVar has a default
+    - TypeVars with `default=None` have `__default__` set to `None`
+    - TypeVars with no `default` parameter passed
+      have `__default__` set to `typing(_extensions).NoDefault`
+
+    On typing_extensions<4.12.0:
+    - TypeVars do not have a `has_default()` method for detecting
+      if a TypeVar has a default
+    - TypeVars with `default=None` have `__default__` set to `NoneType`
+    - TypeVars with no `default` parameter passed
+      have `__default__` set to `typing(_extensions).NoDefault`
+    """
+ try:
+ return tvar.has_default()
+ except AttributeError:
+ # compatibility for typing_extensions<4.12.0
+ return getattr(tvar, "__default__", None) is not None
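+
+# Illustrative example (editor's note), assuming typing_extensions>=4.12.0:
+#
+#     from typing_extensions import TypeVar
+#
+#     T = TypeVar("T", default=int)
+#     U = TypeVar("U")
+#     _tvar_has_default(T)  # -> True
+#     _tvar_has_default(U)  # -> False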
+
+
+def generate_mapping(cl: type, old_mapping: dict[str, type] = {}) -> dict[str, type]:
+ """Generate a mapping of typevars to actual types for a generic class."""
+ mapping = dict(old_mapping)
+
+ origin = get_origin(cl)
+
+ if origin is not None:
+ # To handle the cases where classes in the typing module are using
+ # the GenericAlias structure but aren't a Generic and hence
+ # end up in this function but do not have an `__parameters__`
+ # attribute. These classes are interface types, for example
+ # `typing.Hashable`.
+ parameters = getattr(get_origin(cl), "__parameters__", None)
+ if parameters is None:
+ return dict(old_mapping)
+
+ for p, t in zip(parameters, get_args(cl)):
+ if isinstance(t, TypeVar):
+ continue
+ mapping[p.__name__] = t
+
+ elif is_generic(cl):
+ # Origin is None, so this may be a subclass of a generic class.
+ orig_bases = cl.__orig_bases__
+ for base in orig_bases:
+ if not hasattr(base, "__args__"):
+ continue
+ base_args = base.__args__
+ if hasattr(base.__origin__, "__parameters__"):
+ base_params = base.__origin__.__parameters__
+ elif any(_tvar_has_default(base_arg) for base_arg in base_args):
+ # TypeVar with a default e.g. PEP 696
+ # https://www.python.org/dev/peps/pep-0696/
+ # Extract the defaults for the TypeVars and insert
+ # them into the mapping
+ mapping_params = [
+ (base_arg, base_arg.__default__)
+ for base_arg in base_args
+ if _tvar_has_default(base_arg)
+ ]
+ base_params, base_args = zip(*mapping_params)
+ else:
+ continue
+
+ for param, arg in zip(base_params, base_args):
+ mapping[param.__name__] = arg
+
+ return mapping
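+
+# Illustrative example (editor's note, `Pair` is hypothetical):
+#
+#     from typing import Generic, TypeVar
+#
+#     T = TypeVar("T")
+#
+#     class Pair(Generic[T]):
+#         pass
+#
+#     generate_mapping(Pair[int])  # -> {"T": int}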
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_lc.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_lc.py
new file mode 100644
index 0000000..04843cd
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_lc.py
@@ -0,0 +1,29 @@
+"""Line-cache functionality."""
+
+import linecache
+from typing import List
+
+
+def generate_unique_filename(cls: type, func_name: str, lines: List[str] = []) -> str:
+ """
+ Create a "filename" suitable for a function being generated.
+
+ If *lines* are provided, insert them in the first free spot or stop
+ if a duplicate is found.
+ """
+ extra = ""
+ count = 1
+
+ while True:
+        unique_filename = "<cattrs generated {} {}.{}{}>".format(
+            func_name, cls.__module__, getattr(cls, "__qualname__", cls.__name__), extra
+        )
+ if not lines:
+ return unique_filename
+ cache_line = (len("\n".join(lines)), None, lines, unique_filename)
+ if linecache.cache.setdefault(unique_filename, cache_line) == cache_line:
+ return unique_filename
+
+ # Looks like this spot is taken. Try again.
+ count += 1
+ extra = f"-{count}"
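+
+# Editor's note: the resulting "filename" embeds the function kind and the
+# class's qualified name, e.g. roughly "<cattrs generated structure
+# mymodule.User>", so tracebacks through generated hooks resolve to a
+# readable linecache entry instead of an empty source location.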
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_shared.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_shared.py
new file mode 100644
index 0000000..4e63143
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/_shared.py
@@ -0,0 +1,58 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from attrs import NOTHING, Attribute, Factory
+
+from .._compat import is_bare_final
+from ..dispatch import StructureHook
+from ..fns import raise_error
+
+if TYPE_CHECKING:
+ from ..converters import BaseConverter
+
+
+def find_structure_handler(
+ a: Attribute, type: Any, c: BaseConverter, prefer_attrs_converters: bool = False
+) -> StructureHook | None:
+ """Find the appropriate structure handler to use.
+
+ Return `None` if no handler should be used.
+ """
+ try:
+ if a.converter is not None and prefer_attrs_converters:
+            # If the user has requested attrib converters be used, return no
+            # handler so structuring falls back to them.
+ handler = None
+ elif (
+ a.converter is not None and not prefer_attrs_converters and type is not None
+ ):
+ handler = c.get_structure_hook(type, cache_result=False)
+ if handler == raise_error:
+ handler = None
+ elif type is not None:
+ if (
+ is_bare_final(type)
+ and a.default is not NOTHING
+ and not isinstance(a.default, Factory)
+ ):
+ # This is a special case where we can use the
+ # type of the default to dispatch on.
+ type = a.default.__class__
+ handler = c.get_structure_hook(type, cache_result=False)
+ if handler == c._structure_call:
+ # Finals can't really be used with _structure_call, so
+ # we wrap it so the rest of the toolchain doesn't get
+ # confused.
+
+ def handler(v, _, _h=handler):
+ return _h(v, type)
+
+ else:
+ handler = c.get_structure_hook(type, cache_result=False)
+ else:
+ handler = c.structure
+ return handler
+ except RecursionError:
+ # This means we're dealing with a reference cycle, so use late binding.
+ return c.structure
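+
+# Editor's note: returning `None` here is meaningful rather than an error;
+# callers treat a missing handler as "leave the raw value for the attrs
+# converter to process", which is how `prefer_attrs_converters` takes effect.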
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/typeddicts.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/typeddicts.py
new file mode 100644
index 0000000..5614d6f
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/gen/typeddicts.py
@@ -0,0 +1,611 @@
+from __future__ import annotations
+
+import re
+import sys
+from typing import TYPE_CHECKING, Any, Callable, Literal, TypeVar
+
+from attrs import NOTHING, Attribute
+
+try:
+ from inspect import get_annotations
+
+ def get_annots(cl) -> dict[str, Any]:
+ return get_annotations(cl, eval_str=True)
+
+except ImportError:
+ # https://docs.python.org/3/howto/annotations.html#accessing-the-annotations-dict-of-an-object-in-python-3-9-and-older
+ def get_annots(cl) -> dict[str, Any]:
+ if isinstance(cl, type):
+ ann = cl.__dict__.get("__annotations__", {})
+ else:
+ ann = getattr(cl, "__annotations__", {})
+ return ann
+
+
+try:
+ from typing_extensions import _TypedDictMeta
+except ImportError:
+ _TypedDictMeta = None
+
+from .._compat import (
+ TypedDict,
+ get_full_type_hints,
+ get_notrequired_base,
+ get_origin,
+ is_annotated,
+ is_bare,
+ is_generic,
+)
+from .._generics import deep_copy_with
+from ..errors import (
+ AttributeValidationNote,
+ ClassValidationError,
+ ForbiddenExtraKeysError,
+ StructureHandlerNotFoundError,
+)
+from ..fns import identity
+from . import AttributeOverride
+from ._consts import already_generating, neutral
+from ._generics import generate_mapping
+from ._lc import generate_unique_filename
+from ._shared import find_structure_handler
+
+if TYPE_CHECKING:
+ from ..converters import BaseConverter
+
+__all__ = ["make_dict_unstructure_fn", "make_dict_structure_fn"]
+
+T = TypeVar("T", bound=TypedDict)
+
+
+def make_dict_unstructure_fn(
+ cl: type[T],
+ converter: BaseConverter,
+ _cattrs_use_linecache: bool = True,
+ **kwargs: AttributeOverride,
+) -> Callable[[T], dict[str, Any]]:
+ """
+ Generate a specialized dict unstructuring function for a TypedDict.
+
+ :param cl: A `TypedDict` class.
+ :param converter: A Converter instance to use for unstructuring nested fields.
+ :param kwargs: A mapping of field names to an `AttributeOverride`, for
+ customization.
+    :param _cattrs_use_linecache: Whether to store the generated code in the
+ _linecache_, for easier debugging and better stack traces.
+ """
+ origin = get_origin(cl)
+ attrs = _adapted_fields(origin or cl) # type: ignore
+ req_keys = _required_keys(origin or cl)
+
+ mapping = {}
+ if is_generic(cl):
+ mapping = generate_mapping(cl, mapping)
+
+ for base in getattr(origin, "__orig_bases__", ()):
+ if is_generic(base) and not str(base).startswith("typing.Generic"):
+ mapping = generate_mapping(base, mapping)
+ break
+
+ # It's possible for origin to be None if this is a subclass
+ # of a generic class.
+ if origin is not None:
+ cl = origin
+
+ cl_name = cl.__name__
+ fn_name = "unstructure_typeddict_" + cl_name
+ globs = {}
+ lines = []
+ internal_arg_parts = {}
+
+ # We keep track of what we're generating to help with recursive
+ # class graphs.
+ try:
+ working_set = already_generating.working_set
+ except AttributeError:
+ working_set = set()
+ already_generating.working_set = working_set
+ if cl in working_set:
+ raise RecursionError()
+ working_set.add(cl)
+
+ try:
+ # We want to short-circuit in certain cases and return the identity
+ # function.
+ # We short-circuit if all of these are true:
+ # * no attributes have been overridden
+    # * all attributes resolve to the `identity` hook
+ for a in attrs:
+ attr_name = a.name
+ override = kwargs.get(attr_name, neutral)
+ if override != neutral:
+ break
+ handler = None
+ t = a.type
+
+ if isinstance(t, TypeVar):
+ if t.__name__ in mapping:
+ t = mapping[t.__name__]
+ else:
+ # Unbound typevars use late binding.
+ handler = converter.unstructure
+ elif is_generic(t) and not is_bare(t) and not is_annotated(t):
+ t = deep_copy_with(t, mapping)
+
+ if handler is None:
+ nrb = get_notrequired_base(t)
+ if nrb is not NOTHING:
+ t = nrb
+ try:
+ handler = converter.get_unstructure_hook(t)
+ except RecursionError:
+ # There's a circular reference somewhere down the line
+ handler = converter.unstructure
+ is_identity = handler == identity
+ if not is_identity:
+ break
+ else:
+            # The loop never broke: every handler is identity, so short-circuit.
+ return identity
+
+ for ix, a in enumerate(attrs):
+ attr_name = a.name
+ override = kwargs.get(attr_name, neutral)
+ if override.omit:
+ lines.append(f" res.pop('{attr_name}', None)")
+ continue
+ if override.rename is not None:
+ # We also need to pop when renaming, since we're copying
+ # the original.
+ lines.append(f" res.pop('{attr_name}', None)")
+ kn = attr_name if override.rename is None else override.rename
+ attr_required = attr_name in req_keys
+
+ # For each attribute, we try resolving the type here and now.
+ # If a type is manually overwritten, this function should be
+ # regenerated.
+ handler = None
+ if override.unstruct_hook is not None:
+ handler = override.unstruct_hook
+ else:
+ t = a.type
+
+ if isinstance(t, TypeVar):
+ if t.__name__ in mapping:
+ t = mapping[t.__name__]
+ else:
+ handler = converter.unstructure
+ elif is_generic(t) and not is_bare(t) and not is_annotated(t):
+ t = deep_copy_with(t, mapping)
+
+ if handler is None:
+ nrb = get_notrequired_base(t)
+ if nrb is not NOTHING:
+ t = nrb
+ try:
+ handler = converter.get_unstructure_hook(t)
+ except RecursionError:
+ # There's a circular reference somewhere down the line
+ handler = converter.unstructure
+
+ is_identity = handler == identity
+
+ if not is_identity:
+ unstruct_handler_name = f"__c_unstr_{ix}"
+ globs[unstruct_handler_name] = handler
+ internal_arg_parts[unstruct_handler_name] = handler
+ invoke = f"{unstruct_handler_name}(instance['{attr_name}'])"
+ elif override.rename is None:
+ # We're not doing anything to this attribute, so
+ # it'll already be present in the input dict.
+ continue
+ else:
+ # Probably renamed, we just fetch it.
+ invoke = f"instance['{attr_name}']"
+
+ if attr_required:
+ # No default or no override.
+ lines.append(f" res['{kn}'] = {invoke}")
+ else:
+ lines.append(f" if '{attr_name}' in instance: res['{kn}'] = {invoke}")
+
+ internal_arg_line = ", ".join([f"{i}={i}" for i in internal_arg_parts])
+ if internal_arg_line:
+ internal_arg_line = f", {internal_arg_line}"
+ for k, v in internal_arg_parts.items():
+ globs[k] = v
+
+ total_lines = [
+ f"def {fn_name}(instance{internal_arg_line}):",
+ " res = instance.copy()",
+ *lines,
+ " return res",
+ ]
+ script = "\n".join(total_lines)
+
+ fname = generate_unique_filename(
+ cl, "unstructure", lines=total_lines if _cattrs_use_linecache else []
+ )
+
+ eval(compile(script, fname, "exec"), globs)
+ finally:
+ working_set.remove(cl)
+ if not working_set:
+ del already_generating.working_set
+
+ return globs[fn_name]
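+
+# Illustrative usage sketch (editor's note, `Point` is hypothetical):
+#
+#     from typing import TypedDict
+#     from cattrs import Converter
+#
+#     class Point(TypedDict):
+#         x: int
+#         y: int
+#
+#     c = Converter()
+#     make_dict_unstructure_fn(Point, c) is identity  # -> True (short-circuit)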
+
+
+def make_dict_structure_fn(
+ cl: Any,
+ converter: BaseConverter,
+ _cattrs_forbid_extra_keys: bool | Literal["from_converter"] = "from_converter",
+ _cattrs_use_linecache: bool = True,
+ _cattrs_detailed_validation: bool | Literal["from_converter"] = "from_converter",
+ **kwargs: AttributeOverride,
+) -> Callable[[dict, Any], Any]:
+ """Generate a specialized dict structuring function for typed dicts.
+
+ :param cl: A `TypedDict` class.
+ :param converter: A Converter instance to use for structuring nested fields.
+ :param kwargs: A mapping of field names to an `AttributeOverride`, for
+ customization.
+ :param _cattrs_detailed_validation: Whether to use a slower mode that produces
+ more detailed errors.
+ :param _cattrs_forbid_extra_keys: Whether the structuring function should raise a
+ `ForbiddenExtraKeysError` if unknown keys are encountered.
+    :param _cattrs_use_linecache: Whether to store the generated code in the
+ _linecache_, for easier debugging and better stack traces.
+
+ .. versionchanged:: 23.2.0
+ The `_cattrs_forbid_extra_keys` and `_cattrs_detailed_validation` parameters
+ take their values from the given converter by default.
+ """
+
+ mapping = {}
+ if is_generic(cl):
+ base = get_origin(cl)
+ mapping = generate_mapping(cl, mapping)
+ if base is not None:
+ # It's possible for this to be a subclass of a generic,
+ # so no origin.
+ cl = base
+
+ for base in getattr(cl, "__orig_bases__", ()):
+ if is_generic(base) and not str(base).startswith("typing.Generic"):
+ mapping = generate_mapping(base, mapping)
+ break
+
+ cl_name = cl.__name__
+ fn_name = "structure_" + cl_name
+
+ # We have generic parameters and need to generate a unique name for the function
+ for p in getattr(cl, "__parameters__", ()):
+ try:
+ name_base = mapping[p.__name__]
+ except KeyError:
+ pn = p.__name__
+ raise StructureHandlerNotFoundError(
+ f"Missing type for generic argument {pn}, specify it when structuring.",
+ p,
+ ) from None
+ name = getattr(name_base, "__name__", None) or str(name_base)
+ # `<>` can be present in lambdas
+ # `|` can be present in unions
+ name = re.sub(r"[\[\.\] ,<>]", "_", name)
+ name = re.sub(r"\|", "u", name)
+ fn_name += f"_{name}"
+
+ internal_arg_parts = {"__cl": cl}
+ globs = {}
+ lines = []
+ post_lines = []
+
+ attrs = _adapted_fields(cl)
+ req_keys = _required_keys(cl)
+
+ allowed_fields = set()
+ if _cattrs_forbid_extra_keys == "from_converter":
+ # BaseConverter doesn't have it so we're careful.
+ _cattrs_forbid_extra_keys = getattr(converter, "forbid_extra_keys", False)
+ if _cattrs_detailed_validation == "from_converter":
+ _cattrs_detailed_validation = converter.detailed_validation
+
+ if _cattrs_forbid_extra_keys:
+ globs["__c_a"] = allowed_fields
+ globs["__c_feke"] = ForbiddenExtraKeysError
+
+ lines.append(" res = o.copy()")
+
+ if _cattrs_detailed_validation:
+ lines.append(" errors = []")
+ internal_arg_parts["__c_cve"] = ClassValidationError
+ internal_arg_parts["__c_avn"] = AttributeValidationNote
+ for ix, a in enumerate(attrs):
+ an = a.name
+ attr_required = an in req_keys
+ override = kwargs.get(an, neutral)
+ if override.omit:
+ continue
+ t = a.type
+
+ if isinstance(t, TypeVar):
+ t = mapping.get(t.__name__, t)
+ elif is_generic(t) and not is_bare(t) and not is_annotated(t):
+ t = deep_copy_with(t, mapping)
+
+ nrb = get_notrequired_base(t)
+ if nrb is not NOTHING:
+ t = nrb
+
+ if is_generic(t) and not is_bare(t) and not is_annotated(t):
+ t = deep_copy_with(t, mapping)
+
+ # For each attribute, we try resolving the type here and now.
+ # If a type is manually overwritten, this function should be
+ # regenerated.
+ if override.struct_hook is not None:
+ # If the user has requested an override, just use that.
+ handler = override.struct_hook
+ else:
+ handler = find_structure_handler(a, t, converter)
+
+ struct_handler_name = f"__c_structure_{ix}"
+ internal_arg_parts[struct_handler_name] = handler
+
+ kn = an if override.rename is None else override.rename
+ allowed_fields.add(kn)
+ i = " "
+ if not attr_required:
+ lines.append(f"{i}if '{kn}' in o:")
+ i = f"{i} "
+ lines.append(f"{i}try:")
+ i = f"{i} "
+
+ tn = f"__c_type_{ix}"
+ internal_arg_parts[tn] = t
+
+ if handler == converter._structure_call:
+ internal_arg_parts[struct_handler_name] = t
+ lines.append(f"{i}res['{an}'] = {struct_handler_name}(o['{kn}'])")
+ else:
+ lines.append(f"{i}res['{an}'] = {struct_handler_name}(o['{kn}'], {tn})")
+ if override.rename is not None:
+ lines.append(f"{i}del res['{kn}']")
+ i = i[:-2]
+ lines.append(f"{i}except Exception as e:")
+ i = f"{i} "
+ lines.append(
+ f'{i}e.__notes__ = [*getattr(e, \'__notes__\', []), __c_avn("Structuring typeddict {cl.__qualname__} @ attribute {an}", "{an}", {tn})]'
+ )
+ lines.append(f"{i}errors.append(e)")
+
+ if _cattrs_forbid_extra_keys:
+ post_lines += [
+ " unknown_fields = o.keys() - __c_a",
+ " if unknown_fields:",
+ " errors.append(__c_feke('', __cl, unknown_fields))",
+ ]
+
+ post_lines.append(
+ f" if errors: raise __c_cve('While structuring ' + {cl.__name__!r}, errors, __cl)"
+ )
+ else:
+ non_required = []
+
+ # The first loop deals with required args.
+ for ix, a in enumerate(attrs):
+ an = a.name
+ attr_required = an in req_keys
+ override = kwargs.get(an, neutral)
+ if override.omit:
+ continue
+ if not attr_required:
+ non_required.append((ix, a))
+ continue
+
+ t = a.type
+
+ if isinstance(t, TypeVar):
+ t = mapping.get(t.__name__, t)
+ elif is_generic(t) and not is_bare(t) and not is_annotated(t):
+ t = deep_copy_with(t, mapping)
+
+ nrb = get_notrequired_base(t)
+ if nrb is not NOTHING:
+ t = nrb
+
+ if override.struct_hook is not None:
+ handler = override.struct_hook
+ else:
+ # For each attribute, we try resolving the type here and now.
+ # If a type is manually overwritten, this function should be
+ # regenerated.
+ handler = converter.get_structure_hook(t)
+
+ kn = an if override.rename is None else override.rename
+ allowed_fields.add(kn)
+
+ struct_handler_name = f"__c_structure_{ix}"
+ internal_arg_parts[struct_handler_name] = handler
+ if handler == converter._structure_call:
+ internal_arg_parts[struct_handler_name] = t
+ invocation_line = f" res['{an}'] = {struct_handler_name}(o['{kn}'])"
+ else:
+ tn = f"__c_type_{ix}"
+ internal_arg_parts[tn] = t
+ invocation_line = (
+ f" res['{an}'] = {struct_handler_name}(o['{kn}'], {tn})"
+ )
+
+ lines.append(invocation_line)
+ if override.rename is not None:
+ lines.append(f" del res['{override.rename}']")
+
+ # The second loop is for optional args.
+ if non_required:
+ for ix, a in non_required:
+ an = a.name
+ override = kwargs.get(an, neutral)
+ t = a.type
+
+ nrb = get_notrequired_base(t)
+ if nrb is not NOTHING:
+ t = nrb
+
+ if isinstance(t, TypeVar):
+ t = mapping.get(t.__name__, t)
+ elif is_generic(t) and not is_bare(t) and not is_annotated(t):
+ t = deep_copy_with(t, mapping)
+
+ if override.struct_hook is not None:
+ handler = override.struct_hook
+ else:
+ # For each attribute, we try resolving the type here and now.
+ # If a type is manually overwritten, this function should be
+ # regenerated.
+ handler = converter.get_structure_hook(t)
+
+ struct_handler_name = f"__c_structure_{ix}"
+ internal_arg_parts[struct_handler_name] = handler
+
+ ian = an
+ kn = an if override.rename is None else override.rename
+ allowed_fields.add(kn)
+ post_lines.append(f" if '{kn}' in o:")
+ if handler == converter._structure_call:
+ internal_arg_parts[struct_handler_name] = t
+ post_lines.append(
+ f" res['{ian}'] = {struct_handler_name}(o['{kn}'])"
+ )
+ else:
+ tn = f"__c_type_{ix}"
+ internal_arg_parts[tn] = t
+ post_lines.append(
+ f" res['{ian}'] = {struct_handler_name}(o['{kn}'], {tn})"
+ )
+ if override.rename is not None:
+ lines.append(f" res.pop('{override.rename}', None)")
+
+ if _cattrs_forbid_extra_keys:
+ post_lines += [
+ " unknown_fields = o.keys() - __c_a",
+ " if unknown_fields:",
+ " raise __c_feke('', __cl, unknown_fields)",
+ ]
+
+ # At the end, we create the function header.
+ internal_arg_line = ", ".join([f"{i}={i}" for i in internal_arg_parts])
+ for k, v in internal_arg_parts.items():
+ globs[k] = v
+
+ total_lines = [
+ f"def {fn_name}(o, _, {internal_arg_line}):",
+ *lines,
+ *post_lines,
+ " return res",
+ ]
+
+ script = "\n".join(total_lines)
+ fname = generate_unique_filename(
+ cl, "structure", lines=total_lines if _cattrs_use_linecache else []
+ )
+
+ eval(compile(script, fname, "exec"), globs)
+ return globs[fn_name]
+
+
+def _adapted_fields(cls: Any) -> list[Attribute]:
+ annotations = get_annots(cls)
+ hints = get_full_type_hints(cls)
+ return [
+ Attribute(
+ n,
+ NOTHING,
+ None,
+ False,
+ False,
+ False,
+ False,
+ False,
+ type=hints[n] if n in hints else annotations[n],
+ )
+ for n, a in annotations.items()
+ ]
+
+
+def _is_extensions_typeddict(cls) -> bool:
+ if _TypedDictMeta is None:
+ return False
+ return cls.__class__ is _TypedDictMeta or (
+ is_generic(cls) and (cls.__origin__.__class__ is _TypedDictMeta)
+ )
+
+
+if sys.version_info >= (3, 11):
+
+ def _required_keys(cls: type) -> set[str]:
+ return cls.__required_keys__
+
+elif sys.version_info >= (3, 9):
+ from typing_extensions import Annotated, NotRequired, Required, get_args
+
+ # Note that there is no `typing.Required` on 3.9 and 3.10, only in
+ # `typing_extensions`. Therefore, `typing.TypedDict` will not honor this
+ # annotation, only `typing_extensions.TypedDict`.
+
+ def _required_keys(cls: type) -> set[str]:
+ """Our own processor for required keys."""
+ if _is_extensions_typeddict(cls):
+ return cls.__required_keys__
+
+ # We vendor a part of the typing_extensions logic for
+ # gathering required keys. *sigh*
+ own_annotations = cls.__dict__.get("__annotations__", {})
+ required_keys = set()
+ # On 3.8 - 3.10, typing.TypedDict doesn't put typeddict superclasses
+ # in the MRO, therefore we cannot handle non-required keys properly
+ # in some situations. Oh well.
+ for key in getattr(cls, "__required_keys__", []):
+ annotation_type = own_annotations[key]
+ annotation_origin = get_origin(annotation_type)
+ if annotation_origin is Annotated:
+ annotation_args = get_args(annotation_type)
+ if annotation_args:
+ annotation_type = annotation_args[0]
+ annotation_origin = get_origin(annotation_type)
+
+ if annotation_origin is NotRequired:
+ pass
+ elif cls.__total__:
+ required_keys.add(key)
+ return required_keys
+
+else:
+ from typing_extensions import Annotated, NotRequired, Required, get_args
+
+ # On 3.8, typing.TypedDicts do not have __required_keys__.
+
+ def _required_keys(cls: type) -> set[str]:
+ """Our own processor for required keys."""
+ if _is_extensions_typeddict(cls):
+ return cls.__required_keys__
+
+ own_annotations = cls.__dict__.get("__annotations__", {})
+ required_keys = set()
+ for key in own_annotations:
+ annotation_type = own_annotations[key]
+
+ if is_annotated(annotation_type):
+ # If this is `Annotated`, we need to get the origin twice.
+ annotation_type = get_origin(annotation_type)
+
+ annotation_origin = get_origin(annotation_type)
+
+ if annotation_origin is Required:
+ required_keys.add(key)
+ elif annotation_origin is NotRequired:
+ pass
+ elif cls.__total__:
+ required_keys.add(key)
+ return required_keys
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/__init__.py
new file mode 100644
index 0000000..876576d
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/__init__.py
@@ -0,0 +1,27 @@
+import sys
+from datetime import datetime
+from typing import Any, Callable, TypeVar
+
+if sys.version_info[:2] < (3, 10):
+ from typing_extensions import ParamSpec
+else:
+ from typing import ParamSpec
+
+
+def validate_datetime(v, _):
+ if not isinstance(v, datetime):
+ raise Exception(f"Expected datetime, got {v}")
+ return v
+
+
+T = TypeVar("T")
+P = ParamSpec("P")
+
+
+def wrap(_: Callable[P, Any]) -> Callable[[Callable[..., T]], Callable[P, T]]:
+ """Wrap a `Converter` `__init__` in a type-safe way."""
+
+ def impl(x: Callable[..., T]) -> Callable[P, T]:
+ return x
+
+ return impl
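+
+# Editor's note: `wrap` exists purely for type checkers. Decorating a factory
+# with `@wrap(SomeConverter)` re-types it (via ParamSpec) to accept the same
+# parameters as `SomeConverter`, while the runtime function is returned
+# unchanged; the preconf modules use it on their `make_converter` factories.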
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/bson.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/bson.py
new file mode 100644
index 0000000..e73d131
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/bson.py
@@ -0,0 +1,106 @@
+"""Preconfigured converters for bson."""
+
+from base64 import b85decode, b85encode
+from datetime import date, datetime
+from typing import Any, Type, TypeVar, Union
+
+from bson import DEFAULT_CODEC_OPTIONS, CodecOptions, Int64, ObjectId, decode, encode
+
+from cattrs._compat import AbstractSet, is_mapping
+from cattrs.gen import make_mapping_structure_fn
+
+from ..converters import BaseConverter, Converter
+from ..dispatch import StructureHook
+from ..strategies import configure_union_passthrough
+from . import validate_datetime, wrap
+
+T = TypeVar("T")
+
+
+class Base85Bytes(bytes):
+ """A subclass to help with binary key encoding/decoding."""
+
+
+class BsonConverter(Converter):
+ def dumps(
+ self,
+ obj: Any,
+ unstructure_as: Any = None,
+ check_keys: bool = False,
+ codec_options: CodecOptions = DEFAULT_CODEC_OPTIONS,
+ ) -> bytes:
+ return encode(
+ self.unstructure(obj, unstructure_as=unstructure_as),
+ check_keys=check_keys,
+ codec_options=codec_options,
+ )
+
+ def loads(
+ self,
+ data: bytes,
+ cl: Type[T],
+ codec_options: CodecOptions = DEFAULT_CODEC_OPTIONS,
+ ) -> T:
+ return self.structure(decode(data, codec_options=codec_options), cl)
+
+
+def configure_converter(converter: BaseConverter):
+ """
+ Configure the converter for use with the bson library.
+
+ * sets are serialized as lists
+    * byte mapping keys are base85-encoded into strings when unstructuring,
+      and decoded back to bytes when structuring
+ * non-string, non-byte mapping keys are coerced into strings when unstructuring
+ * a deserialization hook is registered for bson.ObjectId by default
+ """
+
+ def gen_unstructure_mapping(cl: Any, unstructure_to=None):
+ key_handler = str
+ args = getattr(cl, "__args__", None)
+ if args:
+ if issubclass(args[0], str):
+ key_handler = None
+ elif issubclass(args[0], bytes):
+
+ def key_handler(k):
+ return b85encode(k).decode("utf8")
+
+ return converter.gen_unstructure_mapping(
+ cl, unstructure_to=unstructure_to, key_handler=key_handler
+ )
+
+ def gen_structure_mapping(cl: Any) -> StructureHook:
+ args = getattr(cl, "__args__", None)
+ if args and issubclass(args[0], bytes):
+ h = make_mapping_structure_fn(cl, converter, key_type=Base85Bytes)
+ else:
+ h = make_mapping_structure_fn(cl, converter)
+ return h
+
+ converter.register_structure_hook(Base85Bytes, lambda v, _: b85decode(v))
+ converter.register_unstructure_hook_factory(is_mapping, gen_unstructure_mapping)
+ converter.register_structure_hook_factory(is_mapping, gen_structure_mapping)
+
+ converter.register_structure_hook(ObjectId, lambda v, _: ObjectId(v))
+ configure_union_passthrough(
+ Union[str, bool, int, float, None, bytes, datetime, ObjectId, Int64], converter
+ )
+
+ # datetime inherits from date, so identity unstructure hook used
+ # here to prevent the date unstructure hook running.
+ converter.register_unstructure_hook(datetime, lambda v: v)
+ converter.register_structure_hook(datetime, validate_datetime)
+ converter.register_unstructure_hook(date, lambda v: v.isoformat())
+ converter.register_structure_hook(date, lambda v, _: date.fromisoformat(v))
+
+
+@wrap(BsonConverter)
+def make_converter(*args: Any, **kwargs: Any) -> BsonConverter:
+ kwargs["unstruct_collection_overrides"] = {
+ AbstractSet: list,
+ **kwargs.get("unstruct_collection_overrides", {}),
+ }
+ res = BsonConverter(*args, **kwargs)
+ configure_converter(res)
+
+ return res
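+
+# Illustrative round trip (editor's note):
+#
+#     conv = make_converter()
+#     raw = conv.dumps({"_id": ObjectId()})  # BSON bytes via bson.encode
+#     conv.loads(raw, dict)                  # back via bson.decode + structure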
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/cbor2.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/cbor2.py
new file mode 100644
index 0000000..73a9a97
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/cbor2.py
@@ -0,0 +1,50 @@
+"""Preconfigured converters for cbor2."""
+
+from datetime import date, datetime, timezone
+from typing import Any, Type, TypeVar, Union
+
+from cbor2 import dumps, loads
+
+from cattrs._compat import AbstractSet
+
+from ..converters import BaseConverter, Converter
+from ..strategies import configure_union_passthrough
+from . import wrap
+
+T = TypeVar("T")
+
+
+class Cbor2Converter(Converter):
+ def dumps(self, obj: Any, unstructure_as: Any = None, **kwargs: Any) -> bytes:
+ return dumps(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs)
+
+ def loads(self, data: bytes, cl: Type[T], **kwargs: Any) -> T:
+ return self.structure(loads(data, **kwargs), cl)
+
+
+def configure_converter(converter: BaseConverter):
+ """
+ Configure the converter for use with the cbor2 library.
+
+ * datetimes are serialized as timestamp floats
+ * sets are serialized as lists
+ """
+ converter.register_unstructure_hook(datetime, lambda v: v.timestamp())
+ converter.register_structure_hook(
+ datetime, lambda v, _: datetime.fromtimestamp(v, timezone.utc)
+ )
+ converter.register_unstructure_hook(date, lambda v: v.isoformat())
+ converter.register_structure_hook(date, lambda v, _: date.fromisoformat(v))
+ configure_union_passthrough(Union[str, bool, int, float, None, bytes], converter)
+
+
+@wrap(Cbor2Converter)
+def make_converter(*args: Any, **kwargs: Any) -> Cbor2Converter:
+ kwargs["unstruct_collection_overrides"] = {
+ AbstractSet: list,
+ **kwargs.get("unstruct_collection_overrides", {}),
+ }
+ res = Cbor2Converter(*args, **kwargs)
+ configure_converter(res)
+
+ return res
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/json.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/json.py
new file mode 100644
index 0000000..acc82ae
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/json.py
@@ -0,0 +1,56 @@
+"""Preconfigured converters for the stdlib json."""
+
+from base64 import b85decode, b85encode
+from datetime import date, datetime
+from json import dumps, loads
+from typing import Any, Type, TypeVar, Union
+
+from .._compat import AbstractSet, Counter
+from ..converters import BaseConverter, Converter
+from ..strategies import configure_union_passthrough
+from . import wrap
+
+T = TypeVar("T")
+
+
+class JsonConverter(Converter):
+ def dumps(self, obj: Any, unstructure_as: Any = None, **kwargs: Any) -> str:
+ return dumps(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs)
+
+ def loads(self, data: Union[bytes, str], cl: Type[T], **kwargs: Any) -> T:
+ return self.structure(loads(data, **kwargs), cl)
+
+
+def configure_converter(converter: BaseConverter):
+ """
+ Configure the converter for use with the stdlib json module.
+
+ * bytes are serialized as base85 strings
+ * datetimes are serialized as ISO 8601
+ * counters are serialized as dicts
+ * sets are serialized as lists
+ * union passthrough is configured for unions of strings, bools, ints,
+ floats and None
+ """
+ converter.register_unstructure_hook(
+ bytes, lambda v: (b85encode(v) if v else b"").decode("utf8")
+ )
+ converter.register_structure_hook(bytes, lambda v, _: b85decode(v))
+ converter.register_unstructure_hook(datetime, lambda v: v.isoformat())
+ converter.register_structure_hook(datetime, lambda v, _: datetime.fromisoformat(v))
+ converter.register_unstructure_hook(date, lambda v: v.isoformat())
+ converter.register_structure_hook(date, lambda v, _: date.fromisoformat(v))
+ configure_union_passthrough(Union[str, bool, int, float, None], converter)
+
+
+@wrap(JsonConverter)
+def make_converter(*args: Any, **kwargs: Any) -> JsonConverter:
+ kwargs["unstruct_collection_overrides"] = {
+ AbstractSet: list,
+ Counter: dict,
+ **kwargs.get("unstruct_collection_overrides", {}),
+ }
+ res = JsonConverter(*args, **kwargs)
+ configure_converter(res)
+
+ return res
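+
+# Illustrative round trip (editor's note):
+#
+#     conv = make_converter()
+#     s = conv.dumps({"seen": datetime(2024, 1, 1)}, indent=2)
+#     # the datetime is emitted as an ISO 8601 string; bytes would be base85
+#     conv.loads(s, dict)  # a plain `dict` target keeps the string as-is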
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/msgpack.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/msgpack.py
new file mode 100644
index 0000000..dd7c369
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/msgpack.py
@@ -0,0 +1,54 @@
+"""Preconfigured converters for msgpack."""
+
+from datetime import date, datetime, time, timezone
+from typing import Any, Type, TypeVar, Union
+
+from msgpack import dumps, loads
+
+from cattrs._compat import AbstractSet
+
+from ..converters import BaseConverter, Converter
+from ..strategies import configure_union_passthrough
+from . import wrap
+
+T = TypeVar("T")
+
+
+class MsgpackConverter(Converter):
+ def dumps(self, obj: Any, unstructure_as: Any = None, **kwargs: Any) -> bytes:
+ return dumps(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs)
+
+ def loads(self, data: bytes, cl: Type[T], **kwargs: Any) -> T:
+ return self.structure(loads(data, **kwargs), cl)
+
+
+def configure_converter(converter: BaseConverter):
+ """
+ Configure the converter for use with the msgpack library.
+
+ * datetimes are serialized as timestamp floats
+ * sets are serialized as lists
+ """
+ converter.register_unstructure_hook(datetime, lambda v: v.timestamp())
+ converter.register_structure_hook(
+ datetime, lambda v, _: datetime.fromtimestamp(v, timezone.utc)
+ )
+ converter.register_unstructure_hook(
+ date, lambda v: datetime.combine(v, time(tzinfo=timezone.utc)).timestamp()
+ )
+ converter.register_structure_hook(
+ date, lambda v, _: datetime.fromtimestamp(v, timezone.utc).date()
+ )
+ configure_union_passthrough(Union[str, bool, int, float, None, bytes], converter)
+
+
+@wrap(MsgpackConverter)
+def make_converter(*args: Any, **kwargs: Any) -> MsgpackConverter:
+ kwargs["unstruct_collection_overrides"] = {
+ AbstractSet: list,
+ **kwargs.get("unstruct_collection_overrides", {}),
+ }
+ res = MsgpackConverter(*args, **kwargs)
+ configure_converter(res)
+
+ return res
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/msgspec.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/msgspec.py
new file mode 100644
index 0000000..6ef84d7
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/msgspec.py
@@ -0,0 +1,185 @@
+"""Preconfigured converters for msgspec."""
+
+from __future__ import annotations
+
+from base64 import b64decode
+from datetime import date, datetime
+from enum import Enum
+from functools import partial
+from typing import Any, Callable, TypeVar, Union, get_type_hints
+
+from attrs import has as attrs_has
+from attrs import resolve_types
+from msgspec import Struct, convert, to_builtins
+from msgspec.json import Encoder, decode
+
+from .._compat import (
+ fields,
+ get_args,
+ get_origin,
+ has,
+ is_bare,
+ is_mapping,
+ is_sequence,
+)
+from ..cols import is_namedtuple
+from ..converters import BaseConverter, Converter
+from ..dispatch import UnstructureHook
+from ..fns import identity
+from ..gen import make_hetero_tuple_unstructure_fn
+from ..strategies import configure_union_passthrough
+from . import wrap
+
+T = TypeVar("T")
+
+__all__ = ["MsgspecJsonConverter", "configure_converter", "make_converter"]
+
+
+class MsgspecJsonConverter(Converter):
+ """A converter specialized for the _msgspec_ library."""
+
+ #: The msgspec encoder for dumping.
+ encoder: Encoder = Encoder()
+
+ def dumps(self, obj: Any, unstructure_as: Any = None, **kwargs: Any) -> bytes:
+ """Unstructure and encode `obj` into JSON bytes."""
+ return self.encoder.encode(
+ self.unstructure(obj, unstructure_as=unstructure_as), **kwargs
+ )
+
+ def get_dumps_hook(
+ self, unstructure_as: Any, **kwargs: Any
+ ) -> Callable[[Any], bytes]:
+ """Produce a `dumps` hook for the given type."""
+ unstruct_hook = self.get_unstructure_hook(unstructure_as)
+ if unstruct_hook in (identity, to_builtins):
+ return self.encoder.encode
+ return self.dumps
+
+ def loads(self, data: bytes, cl: type[T], **kwargs: Any) -> T:
+ """Decode and structure `cl` from the provided JSON bytes."""
+ return self.structure(decode(data, **kwargs), cl)
+
+ def get_loads_hook(self, cl: type[T]) -> Callable[[bytes], T]:
+ """Produce a `loads` hook for the given type."""
+ return partial(self.loads, cl=cl)
+
+
+def configure_converter(converter: Converter) -> None:
+ """Configure the converter for the msgspec library.
+
+ * bytes are serialized as base64 strings, directly by msgspec
+ * datetimes and dates are passed through to be serialized as RFC 3339 directly
+ * enums are passed through to msgspec directly
+ * union passthrough configured for str, bool, int, float and None
+ """
+ configure_passthroughs(converter)
+
+ converter.register_unstructure_hook(Struct, to_builtins)
+ converter.register_unstructure_hook(Enum, to_builtins)
+
+ converter.register_structure_hook(Struct, convert)
+ converter.register_structure_hook(bytes, lambda v, _: b64decode(v))
+ converter.register_structure_hook(datetime, lambda v, _: convert(v, datetime))
+ converter.register_structure_hook(date, lambda v, _: date.fromisoformat(v))
+ configure_union_passthrough(Union[str, bool, int, float, None], converter)
+
+
+@wrap(MsgspecJsonConverter)
+def make_converter(*args: Any, **kwargs: Any) -> MsgspecJsonConverter:
+ res = MsgspecJsonConverter(*args, **kwargs)
+ configure_converter(res)
+ return res
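+
+# Editor's note: the payoff of the passthrough configuration below is that
+# `get_dumps_hook` resolves to `encoder.encode` for types msgspec can already
+# serialize natively, skipping the cattrs unstructure layer entirely.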
+
+
+def configure_passthroughs(converter: Converter) -> None:
+ """Configure optimizing passthroughs.
+
+ A passthrough is when we let msgspec handle something automatically.
+ """
+ converter.register_unstructure_hook(bytes, to_builtins)
+ converter.register_unstructure_hook_factory(is_mapping, mapping_unstructure_factory)
+ converter.register_unstructure_hook_factory(is_sequence, seq_unstructure_factory)
+ converter.register_unstructure_hook_factory(has, attrs_unstructure_factory)
+ converter.register_unstructure_hook_factory(
+ is_namedtuple, namedtuple_unstructure_factory
+ )
+
+
+def seq_unstructure_factory(type, converter: Converter) -> UnstructureHook:
+ """The msgspec unstructure hook factory for sequences."""
+ if is_bare(type):
+ type_arg = Any
+ else:
+ args = get_args(type)
+ type_arg = args[0]
+ handler = converter.get_unstructure_hook(type_arg, cache_result=False)
+
+ if handler in (identity, to_builtins):
+ return handler
+ return converter.gen_unstructure_iterable(type)
+
+
+def mapping_unstructure_factory(type, converter: BaseConverter) -> UnstructureHook:
+ """The msgspec unstructure hook factory for mappings."""
+ if is_bare(type):
+ key_arg = Any
+ val_arg = Any
+ key_handler = converter.get_unstructure_hook(key_arg, cache_result=False)
+ value_handler = converter.get_unstructure_hook(val_arg, cache_result=False)
+ else:
+ args = get_args(type)
+ if len(args) == 2:
+ key_arg, val_arg = args
+ else:
+            # Probably a Counter; only the key type is parametrized.
+            key_arg, val_arg = args[0], Any
+ key_handler = converter.get_unstructure_hook(key_arg, cache_result=False)
+ value_handler = converter.get_unstructure_hook(val_arg, cache_result=False)
+
+ if key_handler in (identity, to_builtins) and value_handler in (
+ identity,
+ to_builtins,
+ ):
+ return to_builtins
+ return converter.gen_unstructure_mapping(type)
+
+
+def attrs_unstructure_factory(type: Any, converter: Converter) -> UnstructureHook:
+ """Choose whether to use msgspec handling or our own."""
+ origin = get_origin(type)
+ attribs = fields(origin or type)
+ if attrs_has(type) and any(isinstance(a.type, str) for a in attribs):
+ resolve_types(type)
+ attribs = fields(origin or type)
+
+ if any(
+ attr.name.startswith("_")
+ or (
+ converter.get_unstructure_hook(attr.type, cache_result=False)
+ not in (identity, to_builtins)
+ )
+ for attr in attribs
+ ):
+ return converter.gen_unstructure_attrs_fromdict(type)
+
+ return to_builtins
+
+
+def namedtuple_unstructure_factory(
+ type: type[tuple], converter: BaseConverter
+) -> UnstructureHook:
+ """A hook factory for unstructuring namedtuples, modified for msgspec."""
+
+ if all(
+ converter.get_unstructure_hook(t) in (identity, to_builtins)
+ for t in get_type_hints(type).values()
+ ):
+ return identity
+
+ return make_hetero_tuple_unstructure_fn(
+ type,
+ converter,
+ unstructure_to=tuple,
+ type_args=tuple(get_type_hints(type).values()),
+ )
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/orjson.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/orjson.py
new file mode 100644
index 0000000..4b595bc
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/orjson.py
@@ -0,0 +1,95 @@
+"""Preconfigured converters for orjson."""
+
+from base64 import b85decode, b85encode
+from datetime import date, datetime
+from enum import Enum
+from functools import partial
+from typing import Any, Type, TypeVar, Union
+
+from orjson import dumps, loads
+
+from .._compat import AbstractSet, is_mapping
+from ..cols import is_namedtuple, namedtuple_unstructure_factory
+from ..converters import BaseConverter, Converter
+from ..fns import identity
+from ..strategies import configure_union_passthrough
+from . import wrap
+
+T = TypeVar("T")
+
+
+class OrjsonConverter(Converter):
+ def dumps(self, obj: Any, unstructure_as: Any = None, **kwargs: Any) -> bytes:
+ return dumps(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs)
+
+ def loads(self, data: Union[bytes, bytearray, memoryview, str], cl: Type[T]) -> T:
+ return self.structure(loads(data), cl)
+
+
+def configure_converter(converter: BaseConverter):
+ """
+ Configure the converter for use with the orjson library.
+
+ * bytes are serialized as base85 strings
+ * datetimes and dates are passed through to be serialized as RFC 3339 by orjson
+ * typed namedtuples are serialized as lists
+ * sets are serialized as lists
+ * string enum mapping keys have special handling
+ * mapping keys are coerced into strings when unstructuring
+
+    .. versionchanged:: 24.1.0
+ Add support for typed namedtuples.
+ """
+ converter.register_unstructure_hook(
+ bytes, lambda v: (b85encode(v) if v else b"").decode("utf8")
+ )
+ converter.register_structure_hook(bytes, lambda v, _: b85decode(v))
+
+ converter.register_structure_hook(datetime, lambda v, _: datetime.fromisoformat(v))
+ converter.register_structure_hook(date, lambda v, _: date.fromisoformat(v))
+
+ def gen_unstructure_mapping(cl: Any, unstructure_to=None):
+ key_handler = str
+ args = getattr(cl, "__args__", None)
+ if args:
+ if issubclass(args[0], str) and issubclass(args[0], Enum):
+
+ def key_handler(v):
+ return v.value
+
+ else:
+ # It's possible the handler for the key type has been overridden.
+ # (For example base85 encoding for bytes.)
+ # In that case, we want to use the override.
+
+ kh = converter.get_unstructure_hook(args[0])
+ if kh != identity:
+ key_handler = kh
+
+ return converter.gen_unstructure_mapping(
+ cl, unstructure_to=unstructure_to, key_handler=key_handler
+ )
+
+ converter._unstructure_func.register_func_list(
+ [
+ (is_mapping, gen_unstructure_mapping, True),
+ (
+ is_namedtuple,
+ partial(namedtuple_unstructure_factory, unstructure_to=tuple),
+ "extended",
+ ),
+ ]
+ )
+ configure_union_passthrough(Union[str, bool, int, float, None], converter)
+
+
+@wrap(OrjsonConverter)
+def make_converter(*args: Any, **kwargs: Any) -> OrjsonConverter:
+ kwargs["unstruct_collection_overrides"] = {
+ AbstractSet: list,
+ **kwargs.get("unstruct_collection_overrides", {}),
+ }
+ res = OrjsonConverter(*args, **kwargs)
+ configure_converter(res)
+
+ return res
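+
+
+# A minimal usage sketch (the dict shape and `payload` name are illustrative):
+# bytes survive a round-trip through their base85 representation.
+#
+#     >>> from typing import Dict
+#     >>> conv = make_converter()
+#     >>> payload = conv.dumps({"blob": b"hello"})
+#     >>> conv.loads(payload, Dict[str, bytes])
+#     {'blob': b'hello'}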
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/pyyaml.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/pyyaml.py
new file mode 100644
index 0000000..7374625
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/pyyaml.py
@@ -0,0 +1,72 @@
+"""Preconfigured converters for pyyaml."""
+
+from datetime import date, datetime
+from functools import partial
+from typing import Any, Type, TypeVar, Union
+
+from yaml import safe_dump, safe_load
+
+from .._compat import FrozenSetSubscriptable
+from ..cols import is_namedtuple, namedtuple_unstructure_factory
+from ..converters import BaseConverter, Converter
+from ..strategies import configure_union_passthrough
+from . import validate_datetime, wrap
+
+T = TypeVar("T")
+
+
+def validate_date(v, _):
+ if not isinstance(v, date):
+ raise ValueError(f"Expected date, got {v}")
+ return v
+
+
+class PyyamlConverter(Converter):
+ def dumps(self, obj: Any, unstructure_as: Any = None, **kwargs: Any) -> str:
+ return safe_dump(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs)
+
+ def loads(self, data: str, cl: Type[T]) -> T:
+ return self.structure(safe_load(data), cl)
+
+
+def configure_converter(converter: BaseConverter):
+ """
+ Configure the converter for use with the pyyaml library.
+
+ * frozensets are serialized as lists
+ * string enums are converted into strings explicitly
+ * datetimes and dates are validated
+ * typed namedtuples are serialized as lists
+
+    .. versionchanged:: 24.1.0
+ Add support for typed namedtuples.
+ """
+ converter.register_unstructure_hook(
+ str, lambda v: v if v.__class__ is str else v.value
+ )
+
+    # datetime inherits from date, so an identity unstructure hook is used
+    # here to prevent the date unstructure hook from running.
+ converter.register_unstructure_hook(datetime, lambda v: v)
+ converter.register_structure_hook(datetime, validate_datetime)
+ converter.register_structure_hook(date, validate_date)
+
+ converter.register_unstructure_hook_factory(is_namedtuple)(
+ partial(namedtuple_unstructure_factory, unstructure_to=tuple)
+ )
+
+ configure_union_passthrough(
+ Union[str, bool, int, float, None, bytes, datetime, date], converter
+ )
+
+
+@wrap(PyyamlConverter)
+def make_converter(*args: Any, **kwargs: Any) -> PyyamlConverter:
+ kwargs["unstruct_collection_overrides"] = {
+ FrozenSetSubscriptable: list,
+ **kwargs.get("unstruct_collection_overrides", {}),
+ }
+ res = PyyamlConverter(*args, **kwargs)
+ configure_converter(res)
+
+ return res
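+
+
+# A minimal usage sketch: naive datetimes pass through safe_dump/safe_load
+# natively and are only validated, not parsed, when structuring.
+#
+#     >>> from datetime import datetime
+#     >>> conv = make_converter()
+#     >>> conv.loads(conv.dumps(datetime(2024, 1, 1)), datetime)
+#     datetime.datetime(2024, 1, 1, 0, 0)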
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/tomlkit.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/tomlkit.py
new file mode 100644
index 0000000..0d0180b
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/tomlkit.py
@@ -0,0 +1,87 @@
+"""Preconfigured converters for tomlkit."""
+
+from base64 import b85decode, b85encode
+from datetime import date, datetime
+from enum import Enum
+from operator import attrgetter
+from typing import Any, Type, TypeVar, Union
+
+from tomlkit import dumps, loads
+from tomlkit.items import Float, Integer, String
+
+from cattrs._compat import AbstractSet, is_mapping
+
+from ..converters import BaseConverter, Converter
+from ..strategies import configure_union_passthrough
+from . import validate_datetime, wrap
+
+T = TypeVar("T")
+_enum_value_getter = attrgetter("_value_")
+
+
+class TomlkitConverter(Converter):
+ def dumps(self, obj: Any, unstructure_as: Any = None, **kwargs: Any) -> str:
+ return dumps(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs)
+
+ def loads(self, data: str, cl: Type[T]) -> T:
+ return self.structure(loads(data), cl)
+
+
+def configure_converter(converter: BaseConverter):
+ """
+ Configure the converter for use with the tomlkit library.
+
+ * bytes are serialized as base85 strings
+ * sets are serialized as lists
+    * tuples are serialized as lists
+ * mapping keys are coerced into strings when unstructuring
+ """
+ converter.register_structure_hook(bytes, lambda v, _: b85decode(v))
+ converter.register_unstructure_hook(
+ bytes, lambda v: (b85encode(v) if v else b"").decode("utf8")
+ )
+
+ def gen_unstructure_mapping(cl: Any, unstructure_to=None):
+ key_handler = str
+ args = getattr(cl, "__args__", None)
+ if args:
+ # Currently, tomlkit has inconsistent behavior on 3.11
+ # so we paper over it here.
+ # https://github.com/sdispater/tomlkit/issues/237
+ if issubclass(args[0], str):
+ key_handler = _enum_value_getter if issubclass(args[0], Enum) else None
+ elif issubclass(args[0], bytes):
+
+ def key_handler(k: bytes):
+ return b85encode(k).decode("utf8")
+
+ return converter.gen_unstructure_mapping(
+ cl, unstructure_to=unstructure_to, key_handler=key_handler
+ )
+
+ converter._unstructure_func.register_func_list(
+ [(is_mapping, gen_unstructure_mapping, True)]
+ )
+
+    # datetime inherits from date, so an identity unstructure hook is used
+    # here to prevent the date unstructure hook from running.
+ converter.register_unstructure_hook(datetime, lambda v: v)
+ converter.register_structure_hook(datetime, validate_datetime)
+ converter.register_unstructure_hook(date, lambda v: v.isoformat())
+ converter.register_structure_hook(date, lambda v, _: date.fromisoformat(v))
+ configure_union_passthrough(
+ Union[str, String, bool, int, Integer, float, Float], converter
+ )
+
+
+@wrap(TomlkitConverter)
+def make_converter(*args: Any, **kwargs: Any) -> TomlkitConverter:
+ kwargs["unstruct_collection_overrides"] = {
+ AbstractSet: list,
+ tuple: list,
+ **kwargs.get("unstruct_collection_overrides", {}),
+ }
+ res = TomlkitConverter(*args, **kwargs)
+ configure_converter(res)
+
+ return res
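+
+
+# A minimal usage sketch (the attrs class `Server` is illustrative; an attrs
+# class is used because TOML requires a table at the top level):
+#
+#     >>> from attrs import define
+#     >>> @define
+#     ... class Server:
+#     ...     host: str
+#     ...     port: int
+#     >>> conv = make_converter()
+#     >>> conv.loads(conv.dumps(Server("localhost", 8080)), Server)
+#     Server(host='localhost', port=8080)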
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/ujson.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/ujson.py
new file mode 100644
index 0000000..7256d52
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/preconf/ujson.py
@@ -0,0 +1,55 @@
+"""Preconfigured converters for ujson."""
+
+from base64 import b85decode, b85encode
+from datetime import date, datetime
+from typing import Any, AnyStr, Type, TypeVar, Union
+
+from ujson import dumps, loads
+
+from cattrs._compat import AbstractSet
+
+from ..converters import BaseConverter, Converter
+from ..strategies import configure_union_passthrough
+from . import wrap
+
+T = TypeVar("T")
+
+
+class UjsonConverter(Converter):
+ def dumps(self, obj: Any, unstructure_as: Any = None, **kwargs: Any) -> str:
+ return dumps(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs)
+
+ def loads(self, data: AnyStr, cl: Type[T], **kwargs: Any) -> T:
+ return self.structure(loads(data, **kwargs), cl)
+
+
+def configure_converter(converter: BaseConverter):
+ """
+ Configure the converter for use with the ujson library.
+
+    * bytes are serialized as base85 strings
+    * datetimes and dates are serialized as ISO 8601
+ * sets are serialized as lists
+ """
+ converter.register_unstructure_hook(
+ bytes, lambda v: (b85encode(v) if v else b"").decode("utf8")
+ )
+ converter.register_structure_hook(bytes, lambda v, _: b85decode(v))
+
+ converter.register_unstructure_hook(datetime, lambda v: v.isoformat())
+ converter.register_structure_hook(datetime, lambda v, _: datetime.fromisoformat(v))
+ converter.register_unstructure_hook(date, lambda v: v.isoformat())
+ converter.register_structure_hook(date, lambda v, _: date.fromisoformat(v))
+ configure_union_passthrough(Union[str, bool, int, float, None], converter)
+
+
+@wrap(UjsonConverter)
+def make_converter(*args: Any, **kwargs: Any) -> UjsonConverter:
+ kwargs["unstruct_collection_overrides"] = {
+ AbstractSet: list,
+ **kwargs.get("unstruct_collection_overrides", {}),
+ }
+ res = UjsonConverter(*args, **kwargs)
+ configure_converter(res)
+
+ return res
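+
+
+# A minimal usage sketch: sets go out as lists and are rebuilt when
+# structuring.
+#
+#     >>> from typing import Set
+#     >>> conv = make_converter()
+#     >>> conv.loads(conv.dumps({1, 2, 3}), Set[int]) == {1, 2, 3}
+#     True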
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/py.typed b/lambdas/aws-dd-forwarder-3.127.0/cattrs/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/__init__.py
new file mode 100644
index 0000000..9caf073
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/__init__.py
@@ -0,0 +1,12 @@
+"""High level strategies for converters."""
+
+from ._class_methods import use_class_methods
+from ._subclasses import include_subclasses
+from ._unions import configure_tagged_union, configure_union_passthrough
+
+__all__ = [
+ "configure_tagged_union",
+ "configure_union_passthrough",
+ "include_subclasses",
+ "use_class_methods",
+]
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_class_methods.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_class_methods.py
new file mode 100644
index 0000000..c2b6325
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_class_methods.py
@@ -0,0 +1,64 @@
+"""Strategy for using class-specific (un)structuring methods."""
+
+from inspect import signature
+from typing import Any, Callable, Optional, Type, TypeVar
+
+from .. import BaseConverter
+
+T = TypeVar("T")
+
+
+def use_class_methods(
+ converter: BaseConverter,
+ structure_method_name: Optional[str] = None,
+ unstructure_method_name: Optional[str] = None,
+) -> None:
+ """
+    Configure the converter so that dedicated methods are used for (un)structuring
+    instances of a class, if such methods are available. The default (un)structuring
+    is applied if no such method can be found.
+
+ :param converter: The `Converter` on which this strategy is applied. You can use
+ :class:`cattrs.BaseConverter` or any other derived class.
+ :param structure_method_name: Optional string with the name of the class method
+ which should be used for structuring. If not provided, no class method will be
+ used for structuring.
+ :param unstructure_method_name: Optional string with the name of the class method
+ which should be used for unstructuring. If not provided, no class method will
+ be used for unstructuring.
+
+    If you want to (un)structure nested objects, just add a converter parameter
+    to your (un)structuring methods and the converter will be passed in there.
+
+ .. versionadded:: 23.2.0
+ """
+
+ if structure_method_name:
+
+ def make_class_method_structure(cl: Type[T]) -> Callable[[Any, Type[T]], T]:
+ fn = getattr(cl, structure_method_name)
+ n_parameters = len(signature(fn).parameters)
+ if n_parameters == 1:
+ return lambda v, _: fn(v)
+ if n_parameters == 2:
+ return lambda v, _: fn(v, converter)
+ raise TypeError("Provide a class method with one or two arguments.")
+
+ converter.register_structure_hook_factory(
+ lambda t: hasattr(t, structure_method_name), make_class_method_structure
+ )
+
+ if unstructure_method_name:
+
+ def make_class_method_unstructure(cl: Type[T]) -> Callable[[T], T]:
+ fn = getattr(cl, unstructure_method_name)
+ n_parameters = len(signature(fn).parameters)
+ if n_parameters == 1:
+ return fn
+ if n_parameters == 2:
+ return lambda self_: fn(self_, converter)
+ raise TypeError("Provide a method with no or one argument.")
+
+ converter.register_unstructure_hook_factory(
+ lambda t: hasattr(t, unstructure_method_name), make_class_method_unstructure
+ )
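+
+
+# A minimal usage sketch (the method names `_structure`/`_unstructure` are
+# illustrative; any names can be passed to `use_class_methods`):
+#
+#     >>> from cattrs import Converter
+#     >>> class Point:
+#     ...     def __init__(self, x: int):
+#     ...         self.x = x
+#     ...     @classmethod
+#     ...     def _structure(cls, data: dict) -> "Point":
+#     ...         return cls(data["x"])
+#     ...     def _unstructure(self) -> dict:
+#     ...         return {"x": self.x}
+#     >>> c = Converter()
+#     >>> use_class_methods(c, "_structure", "_unstructure")
+#     >>> c.unstructure(c.structure({"x": 1}, Point))
+#     {'x': 1}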
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_subclasses.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_subclasses.py
new file mode 100644
index 0000000..06a92af
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_subclasses.py
@@ -0,0 +1,238 @@
+"""Strategies for customizing subclass behaviors."""
+
+from __future__ import annotations
+
+from gc import collect
+from typing import Any, Callable, TypeVar, Union
+
+from ..converters import BaseConverter
+from ..gen import AttributeOverride, make_dict_structure_fn, make_dict_unstructure_fn
+from ..gen._consts import already_generating
+
+
+def _make_subclasses_tree(cl: type) -> list[type]:
+ return [cl] + [
+ sscl for scl in cl.__subclasses__() for sscl in _make_subclasses_tree(scl)
+ ]
+
+
+def _has_subclasses(cl: type, given_subclasses: tuple[type, ...]) -> bool:
+ """Whether the given class has subclasses from `given_subclasses`."""
+ actual = set(cl.__subclasses__())
+ given = set(given_subclasses)
+ return bool(actual & given)
+
+
+def _get_union_type(cl: type, given_subclasses_tree: tuple[type]) -> type | None:
+ actual_subclass_tree = tuple(_make_subclasses_tree(cl))
+ class_tree = tuple(set(actual_subclass_tree) & set(given_subclasses_tree))
+ return Union[class_tree] if len(class_tree) >= 2 else None
+
+
+C = TypeVar("C", bound=BaseConverter)
+
+
+def include_subclasses(
+ cl: type,
+ converter: C,
+ subclasses: tuple[type, ...] | None = None,
+ union_strategy: Callable[[Any, C], Any] | None = None,
+ overrides: dict[str, AttributeOverride] | None = None,
+) -> None:
+ """
+ Configure the converter so that the attrs/dataclass `cl` is un/structured as if it
+ was a union of itself and all its subclasses that are defined at the time when this
+ strategy is applied.
+
+ :param cl: A base `attrs` or `dataclass` class.
+ :param converter: The `Converter` on which this strategy is applied. Do note that
+ the strategy does not work for a :class:`cattrs.BaseConverter`.
+    :param subclasses: A tuple of subclasses whose ancestor is `cl`. If left as `None`,
+        subclasses are detected recursively via the `__subclasses__` method of `cl`
+        and its descendants.
+ :param union_strategy: A callable of two arguments passed by position
+ (`subclass_union`, `converter`) that defines the union strategy to use to
+ disambiguate the subclasses union. If `None` (the default), the automatic unique
+ field disambiguation is used which means that every single subclass
+ participating in the union must have an attribute name that does not exist in
+ any other sibling class.
+ :param overrides: a mapping of `cl` attribute names to overrides (instantiated with
+ :func:`cattrs.gen.override`) to customize un/structuring.
+
+ .. versionadded:: 23.1.0
+ .. versionchanged:: 24.1.0
+ When overrides are not provided, hooks for individual classes are retrieved from
+ the converter instead of generated with no overrides, using converter defaults.
+ """
+ # Due to https://github.com/python-attrs/attrs/issues/1047
+ collect()
+ if subclasses is not None:
+ parent_subclass_tree = (cl, *subclasses)
+ else:
+ parent_subclass_tree = tuple(_make_subclasses_tree(cl))
+
+ if union_strategy is None:
+ _include_subclasses_without_union_strategy(
+ cl, converter, parent_subclass_tree, overrides
+ )
+ else:
+ _include_subclasses_with_union_strategy(
+ converter, parent_subclass_tree, union_strategy, overrides
+ )
+
+
+def _include_subclasses_without_union_strategy(
+ cl,
+ converter: BaseConverter,
+ parent_subclass_tree: tuple[type],
+ overrides: dict[str, AttributeOverride] | None,
+):
+ # The iteration approach is required if subclasses are more than one level deep:
+ for cl in parent_subclass_tree:
+ # We re-create a reduced union type to handle the following case:
+ #
+        #     converter.structure(d, Child)
+        #
+        # In the above, the `Child` argument will be transformed to a union type of
+        # itself and its subtypes; that way we guarantee that the returned object will
+        # not be the parent.
+ subclass_union = _get_union_type(cl, parent_subclass_tree)
+
+ def cls_is_cl(cls, _cl=cl):
+ return cls is _cl
+
+ if overrides is not None:
+ base_struct_hook = make_dict_structure_fn(cl, converter, **overrides)
+ base_unstruct_hook = make_dict_unstructure_fn(cl, converter, **overrides)
+ else:
+ base_struct_hook = converter.get_structure_hook(cl)
+ base_unstruct_hook = converter.get_unstructure_hook(cl)
+
+ if subclass_union is None:
+
+ def struct_hook(val: dict, _, _cl=cl, _base_hook=base_struct_hook) -> cl:
+ return _base_hook(val, _cl)
+
+ else:
+ dis_fn = converter._get_dis_func(subclass_union, overrides=overrides)
+
+ def struct_hook(
+ val: dict,
+ _,
+ _c=converter,
+ _cl=cl,
+ _base_hook=base_struct_hook,
+ _dis_fn=dis_fn,
+ ) -> cl:
+ """
+ If val is disambiguated to the class `cl`, use its base hook.
+
+ If val is disambiguated to a subclass, dispatch on its exact runtime
+ type.
+ """
+ dis_cl = _dis_fn(val)
+ if dis_cl is _cl:
+ return _base_hook(val, _cl)
+ return _c.structure(val, dis_cl)
+
+ def unstruct_hook(
+ val: parent_subclass_tree[0],
+ _c=converter,
+ _cl=cl,
+ _base_hook=base_unstruct_hook,
+ ) -> dict:
+ """
+ If val is an instance of the class `cl`, use the hook.
+
+ If val is an instance of a subclass, dispatch on its exact runtime type.
+ """
+ if val.__class__ is _cl:
+ return _base_hook(val)
+ return _c.unstructure(val, unstructure_as=val.__class__)
+
+ # This needs to use function dispatch, using singledispatch will again
+ # match A and all subclasses, which is not what we want.
+ converter.register_structure_hook_func(cls_is_cl, struct_hook)
+ converter.register_unstructure_hook_func(cls_is_cl, unstruct_hook)
+
+
+def _include_subclasses_with_union_strategy(
+ converter: C,
+ union_classes: tuple[type, ...],
+ union_strategy: Callable[[Any, C], Any],
+ overrides: dict[str, AttributeOverride] | None,
+):
+ """
+ This function is tricky because we're dealing with what is essentially a circular
+ reference.
+
+ We need to generate a structure hook for a class that is both:
+    * specific to that particular class and its own fields
+    * but also able to handle all of its descendants
+
+ Hence the dance with registering below.
+ """
+
+ parent_classes = [cl for cl in union_classes if _has_subclasses(cl, union_classes)]
+ if not parent_classes:
+ return
+
+ original_unstruct_hooks = {}
+ original_struct_hooks = {}
+ for cl in union_classes:
+ # In the first pass, every class gets its own unstructure function according to
+ # the overrides.
+ # We just generate the hooks, and do not register them. This allows us to
+ # manipulate the _already_generating set to force runtime dispatch.
+ already_generating.working_set = set(union_classes) - {cl}
+ try:
+ if overrides is not None:
+ unstruct_hook = make_dict_unstructure_fn(cl, converter, **overrides)
+ struct_hook = make_dict_structure_fn(cl, converter, **overrides)
+ else:
+ unstruct_hook = converter.get_unstructure_hook(cl, cache_result=False)
+ struct_hook = converter.get_structure_hook(cl, cache_result=False)
+ finally:
+ already_generating.working_set = set()
+ original_unstruct_hooks[cl] = unstruct_hook
+ original_struct_hooks[cl] = struct_hook
+
+ # Now that's done, we can register all the hooks and generate the
+ # union handler. The union handler needs them.
+ final_union = Union[union_classes] # type: ignore
+
+ for cl, hook in original_unstruct_hooks.items():
+
+ def cls_is_cl(cls, _cl=cl):
+ return cls is _cl
+
+ converter.register_unstructure_hook_func(cls_is_cl, hook)
+
+ for cl, hook in original_struct_hooks.items():
+
+ def cls_is_cl(cls, _cl=cl):
+ return cls is _cl
+
+ converter.register_structure_hook_func(cls_is_cl, hook)
+
+ union_strategy(final_union, converter)
+ unstruct_hook = converter.get_unstructure_hook(final_union)
+ struct_hook = converter.get_structure_hook(final_union)
+
+ for cl in union_classes:
+ # In the second pass, we overwrite the hooks with the union hook.
+
+ def cls_is_cl(cls, _cl=cl):
+ return cls is _cl
+
+ converter.register_unstructure_hook_func(cls_is_cl, unstruct_hook)
+ subclasses = tuple([c for c in union_classes if issubclass(c, cl)])
+ if len(subclasses) > 1:
+ u = Union[subclasses] # type: ignore
+ union_strategy(u, converter)
+ struct_hook = converter.get_structure_hook(u)
+
+ def sh(payload: dict, _, _u=u, _s=struct_hook) -> cl:
+ return _s(payload, _u)
+
+ converter.register_structure_hook_func(cls_is_cl, sh)
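+
+
+# A minimal usage sketch (default unique-field disambiguation; `Child` is
+# distinguishable because it adds a field `b` that `Parent` lacks):
+#
+#     >>> from attrs import define
+#     >>> from cattrs import Converter
+#     >>> @define
+#     ... class Parent:
+#     ...     a: int
+#     >>> @define
+#     ... class Child(Parent):
+#     ...     b: int
+#     >>> c = Converter()
+#     >>> include_subclasses(Parent, c)
+#     >>> c.structure({"a": 1, "b": 2}, Parent)
+#     Child(a=1, b=2)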
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_unions.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_unions.py
new file mode 100644
index 0000000..f0d270d
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/strategies/_unions.py
@@ -0,0 +1,258 @@
+from collections import defaultdict
+from typing import Any, Callable, Dict, Literal, Type, Union
+
+from attrs import NOTHING
+
+from cattrs import BaseConverter
+from cattrs._compat import get_newtype_base, is_literal, is_subclass, is_union_type
+
+__all__ = [
+ "default_tag_generator",
+ "configure_tagged_union",
+ "configure_union_passthrough",
+]
+
+
+def default_tag_generator(typ: Type) -> str:
+ """Return the class name."""
+ return typ.__name__
+
+
+def configure_tagged_union(
+ union: Any,
+ converter: BaseConverter,
+ tag_generator: Callable[[Type], str] = default_tag_generator,
+ tag_name: str = "_type",
+ default: Union[Type, Literal[NOTHING]] = NOTHING,
+) -> None:
+ """
+ Configure the converter so that `union` (which should be a union) is
+ un/structured with the help of an additional piece of data in the
+ unstructured payload, the tag.
+
+ :param converter: The converter to apply the strategy to.
+ :param tag_generator: A `tag_generator` function is used to map each
+ member of the union to a tag, which is then included in the
+ unstructured payload. The default tag generator returns the name of
+ the class.
+ :param tag_name: The key under which the tag will be set in the
+ unstructured payload. By default, `'_type'`.
+ :param default: An optional class to be used if the tag information
+ is not present when structuring.
+
+ The tagged union strategy currently only works with the dict
+ un/structuring base strategy.
+
+ .. versionadded:: 23.1.0
+ """
+ args = union.__args__
+ tag_to_hook = {}
+ exact_cl_unstruct_hooks = {}
+ for cl in args:
+ tag = tag_generator(cl)
+ struct_handler = converter.get_structure_hook(cl)
+ unstruct_handler = converter.get_unstructure_hook(cl)
+
+ def structure_union_member(val: dict, _cl=cl, _h=struct_handler) -> cl:
+ return _h(val, _cl)
+
+ def unstructure_union_member(val: union, _h=unstruct_handler) -> dict:
+ return _h(val)
+
+ tag_to_hook[tag] = structure_union_member
+ exact_cl_unstruct_hooks[cl] = unstructure_union_member
+
+ cl_to_tag = {cl: tag_generator(cl) for cl in args}
+
+ if default is not NOTHING:
+ default_handler = converter.get_structure_hook(default)
+
+ def structure_default(val: dict, _cl=default, _h=default_handler):
+ return _h(val, _cl)
+
+ tag_to_hook = defaultdict(lambda: structure_default, tag_to_hook)
+ cl_to_tag = defaultdict(lambda: default, cl_to_tag)
+
+ def unstructure_tagged_union(
+ val: union,
+ _exact_cl_unstruct_hooks=exact_cl_unstruct_hooks,
+ _cl_to_tag=cl_to_tag,
+ _tag_name=tag_name,
+ ) -> Dict:
+ res = _exact_cl_unstruct_hooks[val.__class__](val)
+ res[_tag_name] = _cl_to_tag[val.__class__]
+ return res
+
+ if default is NOTHING:
+ if getattr(converter, "forbid_extra_keys", False):
+
+ def structure_tagged_union(
+ val: dict, _, _tag_to_cl=tag_to_hook, _tag_name=tag_name
+ ) -> union:
+ val = val.copy()
+ return _tag_to_cl[val.pop(_tag_name)](val)
+
+ else:
+
+ def structure_tagged_union(
+ val: dict, _, _tag_to_cl=tag_to_hook, _tag_name=tag_name
+ ) -> union:
+ return _tag_to_cl[val[_tag_name]](val)
+
+ else:
+ if getattr(converter, "forbid_extra_keys", False):
+
+ def structure_tagged_union(
+ val: dict,
+ _,
+ _tag_to_hook=tag_to_hook,
+ _tag_name=tag_name,
+ _dh=default_handler,
+ _default=default,
+ ) -> union:
+ if _tag_name in val:
+ val = val.copy()
+ return _tag_to_hook[val.pop(_tag_name)](val)
+ return _dh(val, _default)
+
+ else:
+
+ def structure_tagged_union(
+ val: dict,
+ _,
+ _tag_to_hook=tag_to_hook,
+ _tag_name=tag_name,
+ _dh=default_handler,
+ _default=default,
+ ) -> union:
+ if _tag_name in val:
+ return _tag_to_hook[val[_tag_name]](val)
+ return _dh(val, _default)
+
+ converter.register_unstructure_hook(union, unstructure_tagged_union)
+ converter.register_structure_hook(union, structure_tagged_union)
+
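+# A minimal usage sketch (default tag generator and tag name; the classes are
+# illustrative):
+#
+#     >>> from typing import Union
+#     >>> from attrs import define
+#     >>> from cattrs import Converter
+#     >>> @define
+#     ... class Cat:
+#     ...     name: str
+#     >>> @define
+#     ... class Dog:
+#     ...     name: str
+#     >>> c = Converter()
+#     >>> configure_tagged_union(Union[Cat, Dog], c)
+#     >>> c.unstructure(Dog(name="rex"), unstructure_as=Union[Cat, Dog])
+#     {'name': 'rex', '_type': 'Dog'}
+#     >>> c.structure({"name": "felix", "_type": "Cat"}, Union[Cat, Dog])
+#     Cat(name='felix')
+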
+
+def configure_union_passthrough(union: Any, converter: BaseConverter) -> None:
+ """
+ Configure the converter to support validating and passing through unions of the
+ provided types and their subsets.
+
+ For example, all mature JSON libraries natively support producing unions of ints,
+ floats, Nones, and strings. Using this strategy, a converter can be configured
+ to efficiently validate and pass through unions containing these types.
+
+ The most important point is that another library (in this example the JSON
+ library) handles producing the union, and the converter is configured to just
+ validate it.
+
+ Literals of provided types are also supported, and are checked by value.
+
+ NewTypes of provided types are also supported.
+
+ The strategy is designed to be O(1) in execution time, and independent of the
+ ordering of types in the union.
+
+ If the union contains a class and one or more of its subclasses, the subclasses
+ will also be included when validating the superclass.
+
+ .. versionadded:: 23.2.0
+ """
+ args = set(union.__args__)
+
+ def make_structure_native_union(exact_type: Any) -> Callable:
+ # `exact_type` is likely to be a subset of the entire configured union (`args`).
+ literal_values = {
+ v for t in exact_type.__args__ if is_literal(t) for v in t.__args__
+ }
+
+ # We have no idea what the actual type of `val` will be, so we can't
+ # use it blindly with an `in` check since it might not be hashable.
+ # So we do an additional check when handling literals.
+        # Note: do not use `literal_values` here, since {0, False} gets reduced to {0}
+ literal_classes = {
+ v.__class__
+ for t in exact_type.__args__
+ if is_literal(t)
+ for v in t.__args__
+ }
+
+ non_literal_classes = {
+ get_newtype_base(t) or t
+ for t in exact_type.__args__
+ if not is_literal(t) and ((get_newtype_base(t) or t) in args)
+ }
+
+ # We augment the set of allowed classes with any configured subclasses of
+ # the exact subclasses.
+ non_literal_classes |= {
+ a for a in args if any(is_subclass(a, c) for c in non_literal_classes)
+ }
+
+ # We check for spillover - union types not handled by the strategy.
+ # If spillover exists and we fail to validate our types, we call
+ # further into the converter with the rest.
+ spillover = {
+ a
+ for a in exact_type.__args__
+ if (get_newtype_base(a) or a) not in non_literal_classes
+ and not is_literal(a)
+ }
+
+ if spillover:
+ spillover_type = (
+ Union[tuple(spillover)] if len(spillover) > 1 else next(iter(spillover))
+ )
+
+ def structure_native_union(
+ val: Any,
+ _: Any,
+ classes=non_literal_classes,
+ vals=literal_values,
+ converter=converter,
+ spillover=spillover_type,
+ ) -> exact_type:
+ if val.__class__ in literal_classes and val in vals:
+ return val
+ if val.__class__ in classes:
+ return val
+ return converter.structure(val, spillover)
+
+ else:
+
+ def structure_native_union(
+ val: Any, _: Any, classes=non_literal_classes, vals=literal_values
+ ) -> exact_type:
+ if val.__class__ in literal_classes and val in vals:
+ return val
+ if val.__class__ in classes:
+ return val
+ raise TypeError(f"{val} ({val.__class__}) not part of {_}")
+
+ return structure_native_union
+
+ def contains_native_union(exact_type: Any) -> bool:
+ """Can we handle this type?"""
+ if is_union_type(exact_type):
+ type_args = set(exact_type.__args__)
+ # We special case optionals, since they are very common
+ # and are handled a little more efficiently by default.
+ if len(type_args) == 2 and type(None) in type_args:
+ return False
+
+ literal_classes = {
+ lit_arg.__class__
+ for t in type_args
+ if is_literal(t)
+ for lit_arg in t.__args__
+ }
+ non_literal_types = {
+ get_newtype_base(t) or t for t in type_args if not is_literal(t)
+ }
+
+            return bool((literal_classes | non_literal_types) & args)
+ return False
+
+ converter.register_structure_hook_factory(
+ contains_native_union, make_structure_native_union
+ )
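+
+
+# A minimal usage sketch: once passthrough is configured for the JSON scalar
+# types, any subset union validates without conversion.
+#
+#     >>> from typing import Union
+#     >>> from cattrs import Converter
+#     >>> c = Converter()
+#     >>> configure_union_passthrough(Union[str, bool, int, float, None], c)
+#     >>> c.structure(42, Union[int, str])
+#     42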
diff --git a/lambdas/aws-dd-forwarder-3.127.0/cattrs/v.py b/lambdas/aws-dd-forwarder-3.127.0/cattrs/v.py
new file mode 100644
index 0000000..c3ab18c
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/cattrs/v.py
@@ -0,0 +1,112 @@
+"""Cattrs validation."""
+
+from typing import Callable, List, Union
+
+from .errors import (
+ ClassValidationError,
+ ForbiddenExtraKeysError,
+ IterableValidationError,
+)
+
+__all__ = ["format_exception", "transform_error"]
+
+
+def format_exception(exc: BaseException, type: Union[type, None]) -> str:
+ """The default exception formatter, handling the most common exceptions.
+
+ The following exceptions are handled specially:
+
+ * `KeyErrors` (`required field missing`)
+ * `ValueErrors` (`invalid value for type, expected ` or just `invalid value`)
+ * `TypeErrors` (`invalid value for type, expected ` and a couple special
+ cases for iterables)
+ * `cattrs.ForbiddenExtraKeysError`
+    * some `AttributeErrors` (special cased for structuring mappings)
+ """
+ if isinstance(exc, KeyError):
+ res = "required field missing"
+ elif isinstance(exc, ValueError):
+ if type is not None:
+ tn = type.__name__ if hasattr(type, "__name__") else repr(type)
+ res = f"invalid value for type, expected {tn}"
+ else:
+ res = "invalid value"
+ elif isinstance(exc, TypeError):
+ if type is None:
+ if exc.args[0].endswith("object is not iterable"):
+ res = "invalid value for type, expected an iterable"
+ else:
+ res = f"invalid type ({exc})"
+ else:
+ tn = type.__name__ if hasattr(type, "__name__") else repr(type)
+ res = f"invalid value for type, expected {tn}"
+ elif isinstance(exc, ForbiddenExtraKeysError):
+ res = f"extra fields found ({', '.join(exc.extra_fields)})"
+ elif isinstance(exc, AttributeError) and exc.args[0].endswith(
+ "object has no attribute 'items'"
+ ):
+        # This was supposed to be a mapping (and have .items()) but it is something else.
+ res = "expected a mapping"
+ elif isinstance(exc, AttributeError) and exc.args[0].endswith(
+ "object has no attribute 'copy'"
+ ):
+        # This was supposed to be a mapping (and have .copy()) but it is something else.
+ # Used for TypedDicts.
+ res = "expected a mapping"
+ else:
+ res = f"unknown error ({exc})"
+
+ return res
+
+
+def transform_error(
+ exc: Union[ClassValidationError, IterableValidationError, BaseException],
+ path: str = "$",
+ format_exception: Callable[
+ [BaseException, Union[type, None]], str
+ ] = format_exception,
+) -> List[str]:
+ """Transform an exception into a list of error messages.
+
+ To get detailed error messages, the exception should be produced by a converter
+ with `detailed_validation` set.
+
+ By default, the error messages are in the form of `{description} @ {path}`.
+
+ While traversing the exception and subexceptions, the path is formed:
+
+ * by appending `.{field_name}` for fields in classes
+ * by appending `[{int}]` for indices in iterables, like lists
+ * by appending `[{str}]` for keys in mappings, like dictionaries
+
+ :param exc: The exception to transform into error messages.
+ :param path: The root path to use.
+ :param format_exception: A callable to use to transform `Exceptions` into
+ string descriptions of errors.
+
+ .. versionadded:: 23.1.0
+ """
+ errors = []
+ if isinstance(exc, IterableValidationError):
+ with_notes, without = exc.group_exceptions()
+ for exc, note in with_notes:
+ p = f"{path}[{note.index!r}]"
+ if isinstance(exc, (ClassValidationError, IterableValidationError)):
+ errors.extend(transform_error(exc, p, format_exception))
+ else:
+ errors.append(f"{format_exception(exc, note.type)} @ {p}")
+ for exc in without:
+ errors.append(f"{format_exception(exc, None)} @ {path}")
+ elif isinstance(exc, ClassValidationError):
+ with_notes, without = exc.group_exceptions()
+ for exc, note in with_notes:
+ p = f"{path}.{note.name}"
+ if isinstance(exc, (ClassValidationError, IterableValidationError)):
+ errors.extend(transform_error(exc, p, format_exception))
+ else:
+ errors.append(f"{format_exception(exc, note.type)} @ {p}")
+ for exc in without:
+ errors.append(f"{format_exception(exc, None)} @ {path}")
+ else:
+ errors.append(f"{format_exception(exc, None)} @ {path}")
+ return errors
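+
+
+# A minimal usage sketch (the attrs class `Point` is illustrative; detailed
+# validation is the converter default):
+#
+#     >>> from attrs import define
+#     >>> from cattrs import Converter
+#     >>> @define
+#     ... class Point:
+#     ...     x: int
+#     >>> c = Converter(detailed_validation=True)
+#     >>> try:
+#     ...     c.structure({"x": "not an int"}, Point)
+#     ... except Exception as exc:
+#     ...     transform_error(exc)
+#     ['invalid value for type, expected int @ $.x']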
diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/LICENSE b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/LICENSE
new file mode 100644
index 0000000..62b076c
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/LICENSE
@@ -0,0 +1,20 @@
+This package contains a modified version of ca-bundle.crt:
+
+ca-bundle.crt -- Bundle of CA Root Certificates
+
+This is a bundle of X.509 certificates of public Certificate Authorities
+(CA). These were automatically extracted from Mozilla's root certificates
+file (certdata.txt). This file can be found in the mozilla source tree:
+https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt
+It contains the certificates in PEM format and therefore
+can be directly used with curl / libcurl / php_curl, or with
+an Apache+mod_ssl webserver for SSL client authentication.
+Just configure this file as the SSLCACertificateFile.#
+
+***** BEGIN LICENSE BLOCK *****
+This Source Code Form is subject to the terms of the Mozilla Public License,
+v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain
+one at http://mozilla.org/MPL/2.0/.
+
+***** END LICENSE BLOCK *****
+@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $
diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/METADATA
new file mode 100644
index 0000000..0a3a772
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/METADATA
@@ -0,0 +1,67 @@
+Metadata-Version: 2.1
+Name: certifi
+Version: 2024.8.30
+Summary: Python package for providing Mozilla's CA Bundle.
+Home-page: https://github.com/certifi/python-certifi
+Author: Kenneth Reitz
+Author-email: me@kennethreitz.com
+License: MPL-2.0
+Project-URL: Source, https://github.com/certifi/python-certifi
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)
+Classifier: Natural Language :: English
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Requires-Python: >=3.6
+License-File: LICENSE
+
+Certifi: Python SSL Certificates
+================================
+
+Certifi provides Mozilla's carefully curated collection of Root Certificates for
+validating the trustworthiness of SSL certificates while verifying the identity
+of TLS hosts. It has been extracted from the `Requests`_ project.
+
+Installation
+------------
+
+``certifi`` is available on PyPI. Simply install it with ``pip``::
+
+ $ pip install certifi
+
+Usage
+-----
+
+To reference the installed certificate authority (CA) bundle, you can use the
+built-in function::
+
+ >>> import certifi
+
+ >>> certifi.where()
+ '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem'
+
+Or from the command line::
+
+ $ python -m certifi
+ /usr/local/lib/python3.7/site-packages/certifi/cacert.pem
+
+Enjoy!
+
+.. _`Requests`: https://requests.readthedocs.io/en/master/
+
+Addition/Removal of Certificates
+--------------------------------
+
+Certifi does not support any addition/removal or other modification of the
+CA trust store content. This project is intended to provide a reliable and
+highly portable root of trust to python deployments. Look to upstream projects
+for methods to use alternate trust.
diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/RECORD
new file mode 100644
index 0000000..7393811
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/RECORD
@@ -0,0 +1,15 @@
+certifi-2024.8.30.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+certifi-2024.8.30.dist-info/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989
+certifi-2024.8.30.dist-info/METADATA,sha256=GhBHRVUN6a4ZdUgE_N5wmukJfyuoE-QyIl8Y3ifNQBM,2222
+certifi-2024.8.30.dist-info/RECORD,,
+certifi-2024.8.30.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+certifi-2024.8.30.dist-info/WHEEL,sha256=UvcQYKBHoFqaQd6LKyqHw9fxEolWLQnlzP0h_LgJAfI,91
+certifi-2024.8.30.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8
+certifi/__init__.py,sha256=p_GYZrjUwPBUhpLlCZoGb0miKBKSqDAyZC5DvIuqbHQ,94
+certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243
+certifi/__pycache__/__init__.cpython-311.pyc,,
+certifi/__pycache__/__main__.cpython-311.pyc,,
+certifi/__pycache__/core.cpython-311.pyc,,
+certifi/cacert.pem,sha256=lO3rZukXdPyuk6BWUJFOKQliWaXH6HGh9l1GGrUgG0c,299427
+certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426
+certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/WHEEL
new file mode 100644
index 0000000..57e56b7
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: setuptools (74.0.0)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/top_level.txt b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/top_level.txt
new file mode 100644
index 0000000..963eac5
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/certifi-2024.8.30.dist-info/top_level.txt
@@ -0,0 +1 @@
+certifi
diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/certifi/__init__.py
new file mode 100644
index 0000000..f61d77f
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/certifi/__init__.py
@@ -0,0 +1,4 @@
+from .core import contents, where
+
+__all__ = ["contents", "where"]
+__version__ = "2024.08.30"
diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi/__main__.py b/lambdas/aws-dd-forwarder-3.127.0/certifi/__main__.py
new file mode 100644
index 0000000..8945b5d
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/certifi/__main__.py
@@ -0,0 +1,12 @@
+import argparse
+
+from certifi import contents, where
+
+parser = argparse.ArgumentParser()
+parser.add_argument("-c", "--contents", action="store_true")
+args = parser.parse_args()
+
+if args.contents:
+ print(contents())
+else:
+ print(where())
diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi/cacert.pem b/lambdas/aws-dd-forwarder-3.127.0/certifi/cacert.pem
new file mode 100644
index 0000000..3c165a1
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/certifi/cacert.pem
@@ -0,0 +1,4929 @@
+
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946069240
+# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90
+# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31
+# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3
+MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub
+j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo
+U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf
+zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b
+u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+
+bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er
+fF6adulZkMV8gzURZVE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2"
+# Serial: 1289
+# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b
+# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7
+# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86
+-----BEGIN CERTIFICATE-----
+MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa
+GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg
+Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J
+WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB
+rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp
++ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1
+ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i
+Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz
+PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og
+/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH
+oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI
+yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud
+EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2
+A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL
+MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT
+ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f
+BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn
+g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl
+fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K
+WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha
+B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc
+hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR
+TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD
+mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z
+ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y
+4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza
+8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3"
+# Serial: 1478
+# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf
+# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85
+# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35
+-----BEGIN CERTIFICATE-----
+MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x
+GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv
+b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV
+BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W
+YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM
+V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB
+4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr
+H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd
+8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv
+vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT
+mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe
+btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc
+T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt
+WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ
+c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A
+4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD
+VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG
+CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0
+aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0
+aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu
+dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw
+czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G
+A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC
+TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg
+Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0
+7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem
+d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd
++LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B
+4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN
+t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x
+DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57
+k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s
+zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j
+Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT
+mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK
+4SVhM7JZG+Ju1zdXtg2pEto=
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG
+# Label: "SwissSign Gold CA - G2"
+# Serial: 13492815561806991280
+# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93
+# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61
+# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95
+-----BEGIN CERTIFICATE-----
+MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV
+BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln
+biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF
+MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT
+d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8
+76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+
+bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c
+6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE
+emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd
+MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt
+MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y
+MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y
+FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi
+aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM
+gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB
+qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7
+lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn
+8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov
+L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6
+45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO
+UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5
+O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC
+bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv
+GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a
+77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC
+hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3
+92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp
+Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w
+ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt
+Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ
+-----END CERTIFICATE-----
+
+# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG
+# Label: "SwissSign Silver CA - G2"
+# Serial: 5700383053117599563
+# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13
+# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb
+# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5
+-----BEGIN CERTIFICATE-----
+MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE
+BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu
+IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow
+RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY
+U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A
+MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv
+Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br
+YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF
+nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH
+6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt
+eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/
+c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ
+MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH
+HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf
+jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6
+5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB
+rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c
+wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0
+cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB
+AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp
+WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9
+xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ
+2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ
+IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8
+aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X
+em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR
+dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/
+OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+
+hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy
+tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureTrust CA O=SecureTrust Corporation
+# Subject: CN=SecureTrust CA O=SecureTrust Corporation
+# Label: "SecureTrust CA"
+# Serial: 17199774589125277788362757014266862032
+# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1
+# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11
+# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73
+-----BEGIN CERTIFICATE-----
+MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz
+MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv
+cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz
+Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO
+0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao
+wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj
+7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS
+8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT
+BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg
+JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3
+6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/
+3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm
+D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS
+CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR
+3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Global CA O=SecureTrust Corporation
+# Subject: CN=Secure Global CA O=SecureTrust Corporation
+# Label: "Secure Global CA"
+# Serial: 9751836167731051554232119481456978597
+# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de
+# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b
+# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69
+-----BEGIN CERTIFICATE-----
+MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK
+MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x
+GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx
+MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg
+Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ
+iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa
+/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ
+jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI
+HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7
+sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w
+gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw
+KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG
+AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L
+URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO
+H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm
+I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY
+iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc
+f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna O=Dhimyotis
+# Subject: CN=Certigna O=Dhimyotis
+# Label: "Certigna"
+# Serial: 18364802974209362175
+# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff
+# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97
+# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d
+-----BEGIN CERTIFICATE-----
+MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV
+BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X
+DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ
+BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3
+DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4
+QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny
+gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw
+zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q
+130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2
+JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw
+DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw
+ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT
+AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj
+AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG
+9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h
+bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc
+fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu
+HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w
+t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw
+WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority
+# Label: "ePKI Root Certification Authority"
+# Serial: 28956088682735189655030529057352760477
+# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3
+# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0
+# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5
+-----BEGIN CERTIFICATE-----
+MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw
+IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL
+SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH
+SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh
+ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X
+DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1
+TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ
+fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA
+sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU
+WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS
+nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH
+dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip
+NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC
+AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF
+MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH
+ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB
+uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl
+PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP
+JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/
+gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2
+j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6
+5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB
+o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS
+/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z
+Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE
+W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D
+hNQ+IIX3Sj0rnP0qCglN6oH4EZw=
+-----END CERTIFICATE-----
+
+# Issuer: O=certSIGN OU=certSIGN ROOT CA
+# Subject: O=certSIGN OU=certSIGN ROOT CA
+# Label: "certSIGN ROOT CA"
+# Serial: 35210227249154
+# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17
+# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b
+# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb
+-----BEGIN CERTIFICATE-----
+MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT
+AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD
+QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP
+MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC
+ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do
+0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ
+UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d
+RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ
+OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv
+JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C
+AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O
+BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ
+LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY
+MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ
+44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I
+Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw
+i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN
+9u6wWk5JRFRYX0KD
+-----END CERTIFICATE-----
+
+# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services)
+# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny"
+# Serial: 80544274841616
+# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88
+# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91
+# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG
+EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3
+MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl
+cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR
+dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB
+pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM
+b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm
+aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz
+IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A
+MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT
+lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz
+AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5
+VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG
+ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2
+BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG
+AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M
+U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh
+bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C
++C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC
+bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F
+uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2
+XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc.
+# Label: "SecureSign RootCA11"
+# Serial: 1
+# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26
+# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3
+# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12
+-----BEGIN CERTIFICATE-----
+MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr
+MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG
+A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0
+MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp
+Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD
+QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz
+i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8
+h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV
+MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9
+UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni
+8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC
+h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD
+VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm
+KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ
+X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr
+QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5
+pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN
+QSdJQO7e5iNEOdyhIta6A/I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd.
+# Label: "Microsec e-Szigno Root CA 2009"
+# Serial: 14014712776195784473
+# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1
+# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e
+# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78
+-----BEGIN CERTIFICATE-----
+MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD
+VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0
+ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G
+CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y
+OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx
+FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp
+Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o
+dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP
+kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc
+cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U
+fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7
+N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC
+xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1
++rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM
+Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG
+SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h
+mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk
+ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775
+tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c
+2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t
+HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=Izenpe.com O=IZENPE S.A.
+# Subject: CN=Izenpe.com O=IZENPE S.A.
+# Label: "Izenpe.com"
+# Serial: 917563065490389241595536686991402621
+# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73
+# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19
+# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f
+-----BEGIN CERTIFICATE-----
+MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4
+MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6
+ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD
+VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j
+b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq
+scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO
+xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H
+LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX
+uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD
+yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+
+JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q
+rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN
+BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L
+hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB
+QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+
+HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu
+Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg
+QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB
+BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx
+MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA
+A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb
+laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56
+awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo
+JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw
+LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT
+VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk
+LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb
+UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/
+QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+
+naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls
+QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA"
+# Serial: 279744
+# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78
+# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e
+# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e
+-----BEGIN CERTIFICATE-----
+MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM
+MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D
+ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU
+cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3
+WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg
+Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw
+IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH
+UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM
+TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU
+BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM
+kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x
+AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV
+HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y
+sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL
+I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8
+J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY
+VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI
+03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Root Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79
+# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48
+# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44
+-----BEGIN CERTIFICATE-----
+MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES
+MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU
+V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz
+WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO
+LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
+AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE
+AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH
+K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX
+RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z
+rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx
+3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq
+hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC
+MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls
+XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D
+lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn
+aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ
+YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw==
+-----END CERTIFICATE-----
+
+# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Subject: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2
+# Label: "Security Communication RootCA2"
+# Serial: 0
+# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43
+# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74
+# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl
+MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe
+U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX
+DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy
+dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj
+YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV
+OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr
+zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM
+VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ
+hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO
+ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw
+awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs
+OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3
+DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF
+coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc
+okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8
+t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy
+1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/
+SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03
+-----END CERTIFICATE-----
+
+# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967
+# Label: "Actalis Authentication Root CA"
+# Serial: 6271844772424770508
+# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6
+# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac
+# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66
+-----BEGIN CERTIFICATE-----
+MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE
+BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w
+MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290
+IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC
+SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1
+ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv
+UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX
+4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9
+KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/
+gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb
+rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ
+51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F
+be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe
+KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F
+v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn
+fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7
+jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz
+ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt
+ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL
+e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70
+jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz
+WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V
+SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j
+pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX
+X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok
+fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R
+K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU
+ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU
+LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT
+LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 2 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29
+# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99
+# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr
+6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV
+L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91
+1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx
+MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ
+QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB
+arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr
+Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi
+FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS
+P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN
+9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz
+uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h
+9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s
+A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t
+OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo
++fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7
+KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2
+DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us
+H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ
+I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7
+5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h
+3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz
+Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327
+# Label: "Buypass Class 3 Root CA"
+# Serial: 2
+# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec
+# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57
+# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d
+-----BEGIN CERTIFICATE-----
+MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd
+MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg
+Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow
+TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw
+HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB
+BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y
+ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E
+N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9
+tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX
+0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c
+/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X
+KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY
+zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS
+O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D
+34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP
+K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3
+AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv
+Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj
+QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV
+cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS
+IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2
+HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa
+O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv
+033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u
+dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE
+kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41
+3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD
+u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq
+4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 3"
+# Serial: 1
+# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef
+# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1
+# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN
+8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/
+RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4
+hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5
+ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM
+EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1
+A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy
+WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ
+1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30
+6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT
+91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml
+e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p
+TpPDpFQUWw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 2009"
+# Serial: 623603
+# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f
+# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0
+# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1
+-----BEGIN CERTIFICATE-----
+MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha
+ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM
+HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03
+UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42
+tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R
+ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM
+lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp
+/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G
+A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G
+A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj
+dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy
+MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl
+cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js
+L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL
+BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni
+acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0
+o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K
+zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8
+PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y
+Johw1+qRzT65ysCQblrGXnRl11z+o+I=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH
+# Label: "D-TRUST Root Class 3 CA 2 EV 2009"
+# Serial: 623604
+# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6
+# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83
+# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF
+MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD
+bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw
+NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV
+BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn
+ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0
+3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z
+qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR
+p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8
+HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw
+ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea
+HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw
+Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh
+c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E
+RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt
+dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku
+Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp
+3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05
+nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF
+CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na
+xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX
+KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1
+-----END CERTIFICATE-----
+
+# Issuer: CN=CA Disig Root R2 O=Disig a.s.
+# Subject: CN=CA Disig Root R2 O=Disig a.s.
+# Label: "CA Disig Root R2"
+# Serial: 10572350602393338211
+# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03
+# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71
+# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03
+-----BEGIN CERTIFICATE-----
+MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV
+BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu
+MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy
+MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx
+EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw
+ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe
+NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH
+PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I
+x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe
+QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR
+yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO
+QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912
+H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ
+QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD
+i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs
+nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1
+rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud
+DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI
+hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM
+tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf
+GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb
+lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka
++elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal
+TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i
+nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3
+gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr
+G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os
+zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x
+L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL
+-----END CERTIFICATE-----
+
+# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV
+# Label: "ACCVRAIZ1"
+# Serial: 6828503384748696800
+# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02
+# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17
+# SHA256 Fingerprint: 9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13
+-----BEGIN CERTIFICATE-----
+MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE
+AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw
+CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ
+BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND
+VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb
+qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY
+HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo
+G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA
+lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr
+IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/
+0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH
+k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47
+4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO
+m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa
+cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl
+uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI
+KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls
+ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG
+AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2
+VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT
+VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG
+CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA
+cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA
+QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA
+7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA
+cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA
+QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA
+czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu
+aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt
+aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud
+DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF
+BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp
+D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU
+JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m
+AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD
+vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms
+tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH
+7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h
+I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA
+h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF
+d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H
+pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA Global Root CA"
+# Serial: 3262
+# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96
+# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65
+# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx
+EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT
+VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5
+NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT
+B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF
+10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz
+0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh
+MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH
+zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc
+46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2
+yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi
+laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP
+oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA
+BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE
+qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm
+4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL
+1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn
+LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF
+H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo
+RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+
+nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh
+15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW
+6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW
+nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j
+wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz
+aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy
+KwbQBM0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera
+# Label: "TeliaSonera Root CA v1"
+# Serial: 199041966741090107964904287217786801558
+# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c
+# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37
+# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89
+-----BEGIN CERTIFICATE-----
+MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw
+NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv
+b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD
+VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F
+VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1
+7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X
+Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+
+/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs
+81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm
+dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe
+Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu
+sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4
+pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs
+slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ
+arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD
+VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG
+9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl
+dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx
+0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj
+TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed
+Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7
+Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI
+OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7
+vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW
+t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn
+HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx
+SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center
+# Label: "T-TeleSec GlobalRoot Class 2"
+# Serial: 1
+# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a
+# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9
+# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52
+-----BEGIN CERTIFICATE-----
+MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx
+KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd
+BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl
+YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1
+OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy
+aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50
+ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G
+CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd
+AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC
+FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi
+1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq
+jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ
+wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/
+WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy
+NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC
+uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw
+IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6
+g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN
+9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP
+BSeOE6Fuwg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot 2011 O=Atos
+# Subject: CN=Atos TrustedRoot 2011 O=Atos
+# Label: "Atos TrustedRoot 2011"
+# Serial: 6643877497813316402
+# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56
+# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21
+# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE
+AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG
+EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM
+FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC
+REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp
+Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM
+VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+
+SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ
+4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
+cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
+eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
+A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
+DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
+vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
+DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
+maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
+lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
+KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 1 G3"
+# Serial: 687049649626669250736271037606554624078720034195
+# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
+# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
+# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
+MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
+wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
+rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
+68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
+4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
+UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
+abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
+3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
+KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
+hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
+Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
+zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
+ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
+MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
+cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
+qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
+YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
+b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
+8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
+NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
+ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
+q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
+nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 2 G3"
+# Serial: 390156079458959257446133169266079962026824725800
+# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
+# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
+# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
+MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
+qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
+n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
+c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
+O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
+o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
+IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
+IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
+8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
+vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
+7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
+cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
+ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
+AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
+roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
+W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
+lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
++V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
+csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
+dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
+KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
+HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
+WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
+-----END CERTIFICATE-----
+
+# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
+# Label: "QuoVadis Root CA 3 G3"
+# Serial: 268090761170461462463995952157327242137089239581
+# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
+# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
+# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
+BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
+BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
+MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
+aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
+/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
+FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
+U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
+ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
+FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
+A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
+eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
+sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
+VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
+A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+
+ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD
+ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px
+KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI
+FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv
+oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg
+u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP
+0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf
+3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl
+8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+
+DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN
+PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/
+ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G2"
+# Serial: 15385348160840213938643033620894905419
+# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d
+# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f
+# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85
+-----BEGIN CERTIFICATE-----
+MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA
+n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc
+biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp
+EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA
+bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu
+YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB
+AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW
+BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI
+QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I
+0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni
+lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9
+B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv
+ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo
+IhNzbM8m9Yop5w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root G3"
+# Serial: 15459312981008553731928384953135426796
+# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb
+# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89
+# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2
+-----BEGIN CERTIFICATE-----
+MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg
+RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf
+Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q
+RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD
+AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY
+JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv
+6pZjamVFkpUBtA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G2"
+# Serial: 4293743540046975378534879503202253541
+# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44
+# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4
+# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f
+-----BEGIN CERTIFICATE-----
+MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH
+MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI
+2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx
+1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ
+q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz
+tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ
+vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV
+5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY
+1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4
+NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG
+Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91
+8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe
+pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl
+MrY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root G3"
+# Serial: 7089244469030293291760083333884364146
+# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca
+# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e
+# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0
+-----BEGIN CERTIFICATE-----
+MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw
+CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu
+ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe
+Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw
+EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x
+IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG
+fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO
+Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd
+BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx
+AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/
+oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8
+sycX
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Trusted Root G4"
+# Serial: 7451500558977370777930084869016614236
+# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49
+# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4
+# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88
+-----BEGIN CERTIFICATE-----
+MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg
+RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV
+UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu
+Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y
+ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If
+xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV
+ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO
+DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ
+jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/
+CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi
+EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM
+fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY
+uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK
+chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t
+9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
+hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD
+ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2
+SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd
++SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc
+fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa
+sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N
+cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N
+0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie
+4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI
+r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1
+/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm
+gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited
+# Label: "COMODO RSA Certification Authority"
+# Serial: 101909084537582093308941363524873193117
+# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18
+# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4
+# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34
+-----BEGIN CERTIFICATE-----
+MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB
+hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV
+BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT
+EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR
+Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR
+6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X
+pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC
+9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV
+/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf
+Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z
++pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w
+qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah
+SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC
+u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf
+Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq
+crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E
+FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB
+/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl
+wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM
+4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV
+2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna
+FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ
+CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK
+boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke
+jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL
+S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb
+QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl
+0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB
+NVOFBkpdn627G190
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network
+# Label: "USERTrust RSA Certification Authority"
+# Serial: 2645093764781058787591871645665788717
+# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5
+# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e
+# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2
+-----BEGIN CERTIFICATE-----
+MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB
+iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl
+cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV
+BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw
+MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV
+BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU
+aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B
+3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY
+tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/
+Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2
+VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT
+79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6
+c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT
+Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l
+c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee
+UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE
+Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd
+BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G
+A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF
+Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO
+VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3
+ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs
+8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR
+iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze
+Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ
+XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/
+qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB
+VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB
+L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG
+jjxDah2nGN59PRbxYvnKkKj9
+-----END CERTIFICATE-----
+
+# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network
+# Label: "USERTrust ECC Certification Authority"
+# Serial: 123013823720199481456569720443997572134
+# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1
+# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0
+# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a
+-----BEGIN CERTIFICATE-----
+MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL
+MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl
+eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT
+JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx
+MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT
+Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg
+VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo
+I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng
+o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G
+A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB
+zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW
+RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5
+# Label: "GlobalSign ECC Root CA - R5"
+# Serial: 32785792099990507226680698011560947931244
+# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08
+# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa
+# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24
+-----BEGIN CERTIFICATE-----
+MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk
+MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH
+bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX
+DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD
+QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu
+MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc
+8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke
+hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI
+KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg
+515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO
+xwy8p2Fp8fc74SrL+SvzZpA3
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust
+# Label: "IdenTrust Commercial Root CA 1"
+# Serial: 13298821034946342390520003877796839426
+# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7
+# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25
+# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae
+-----BEGIN CERTIFICATE-----
+MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu
+VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw
+MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw
+JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT
+3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU
++ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp
+S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1
+bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi
+T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL
+vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK
+Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK
+dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT
+c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv
+l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N
+iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB
+/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD
+ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH
+6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt
+LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93
+nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3
++wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK
+W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT
+AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq
+l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG
+4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ
+mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A
+7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H
+-----END CERTIFICATE-----
+
+# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust
+# Label: "IdenTrust Public Sector Root CA 1"
+# Serial: 13298821034946342390521976156843933698
+# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba
+# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd
+# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu
+VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN
+MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
+MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
+ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
+RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
+bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
+/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
+3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
+EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
+9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
+GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
+2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
+WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
+W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
+AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
+t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
+DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
+TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
+lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
+mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
+WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
++bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
+tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
+GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
+8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G2"
+# Serial: 1246989352
+# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
+# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
+# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
+-----BEGIN CERTIFICATE-----
+MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
+cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
+IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
+dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
+NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
+dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
+dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
+aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
+AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
+RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
+cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
+wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
+U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
+jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
+BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
+jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
+Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
+1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
+nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
+VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - EC1"
+# Serial: 51543124481930649114116133369
+# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
+# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
+# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
+-----BEGIN CERTIFICATE-----
+MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
+A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
+d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
+dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
+RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
+MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
+VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
+L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
+Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
+ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
+A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
+ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
+Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
+BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
+R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
+hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
+-----END CERTIFICATE-----
+
+# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
+# Label: "CFCA EV ROOT"
+# Serial: 407555286
+# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
+# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
+# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
+TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
+MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
+aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
+T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
+sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
+TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
+/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
+7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
+EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
+hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
+a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
+aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
+TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
+PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
+cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
+tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
+BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
+ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
+ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
+jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
+ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
+P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19
+xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d
+Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN
+5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe
+/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z
+AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ
+5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GB CA"
+# Serial: 157768595616588414422159278966750757568
+# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d
+# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed
+# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6
+-----BEGIN CERTIFICATE-----
+MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt
+MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg
+Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i
+YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x
+CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG
+b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh
+bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3
+HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx
+WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX
+1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk
+u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P
+99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r
+M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB
+BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh
+cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5
+gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO
+ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf
+aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic
+Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A.
+# Label: "SZAFIR ROOT CA2"
+# Serial: 357043034767186914217277344587386743377558296292
+# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99
+# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de
+# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6
+ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw
+NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L
+cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg
+Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN
+QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT
+3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw
+3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6
+3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5
+BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN
+XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF
+AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw
+8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG
+nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP
+oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy
+d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg
+LvWpCz/UXeHPhJ/iGcJfitYgHuNztw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Network CA 2"
+# Serial: 44979900017204383099463764357512596969
+# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2
+# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92
+# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04
+-----BEGIN CERTIFICATE-----
+MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB
+gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu
+QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG
+A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz
+OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ
+VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3
+b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA
+DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn
+0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB
+OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE
+fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E
+Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m
+o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i
+sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW
+OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez
+Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS
+adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n
+3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC
+AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ
+F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf
+CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29
+XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm
+djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/
+WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb
+AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq
+P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko
+b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj
+XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P
+5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi
+DrW5viSP
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce
+# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6
+# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36
+-----BEGIN CERTIFICATE-----
+MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix
+DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k
+IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT
+N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v
+dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG
+A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh
+ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx
+QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1
+dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA
+4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0
+AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10
+4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C
+ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV
+9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD
+gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6
+Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq
+NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko
+LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc
+Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV
+HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd
+ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I
+XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI
+M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot
+9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V
+Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea
+j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh
+X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ
+l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf
+bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4
+pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK
+e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0
+vm9qp/UsQu0yrbYhnr68
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority
+# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015"
+# Serial: 0
+# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef
+# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66
+# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33
+-----BEGIN CERTIFICATE-----
+MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN
+BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl
+bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv
+b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ
+BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj
+YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5
+MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0
+dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg
+QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa
+jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC
+MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi
+C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep
+lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof
+TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X1 O=Internet Security Research Group
+# Subject: CN=ISRG Root X1 O=Internet Security Research Group
+# Label: "ISRG Root X1"
+# Serial: 172886928669790476064670243504169061120
+# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e
+# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8
+# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6
+-----BEGIN CERTIFICATE-----
+MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw
+TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh
+cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4
+WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu
+ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY
+MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc
+h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+
+0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U
+A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW
+T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH
+B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC
+B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv
+KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn
+OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn
+jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw
+qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI
+rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq
+hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL
+ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ
+3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK
+NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5
+ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur
+TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC
+jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc
+oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq
+4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA
+mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d
+emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc=
+-----END CERTIFICATE-----
+
+# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM
+# Label: "AC RAIZ FNMT-RCM"
+# Serial: 485876308206448804701554682760554759
+# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d
+# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20
+# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx
+CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ
+WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ
+BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG
+Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/
+yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf
+BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz
+WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF
+tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z
+374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC
+IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL
+mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7
+wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS
+MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2
+ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet
+UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H
+YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3
+LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD
+nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1
+RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM
+LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf
+77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N
+JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm
+fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp
+6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp
+1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B
+9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok
+RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv
+uu8wd+RU4riEmViAqhOLUTpPSPaLtrM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 1 O=Amazon
+# Subject: CN=Amazon Root CA 1 O=Amazon
+# Label: "Amazon Root CA 1"
+# Serial: 143266978916655856878034712317230054538369994
+# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6
+# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16
+# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e
+-----BEGIN CERTIFICATE-----
+MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj
+ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
+9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
+IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
+VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
+93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
+jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
+AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
+A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
+U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
+N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
+o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
+5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
+rqXRfboQnoZsG4q5WTP468SQvvG5
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 2 O=Amazon
+# Subject: CN=Amazon Root CA 2 O=Amazon
+# Label: "Amazon Root CA 2"
+# Serial: 143266982885963551818349160658925006970653239
+# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66
+# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a
+# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4
+-----BEGIN CERTIFICATE-----
+MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
+ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
+b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
+MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
+b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
+gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
+W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
+1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
+8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
+2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
+z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
+8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
+mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
+7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
++XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
+0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
+UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
+LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
++gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
+k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
+7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
+btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
+urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
+fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
+n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
+76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
+9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
+4PsJYGw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 3 O=Amazon
+# Subject: CN=Amazon Root CA 3 O=Amazon
+# Label: "Amazon Root CA 3"
+# Serial: 143266986699090766294700635381230934788665930
+# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87
+# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e
+# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4
+-----BEGIN CERTIFICATE-----
+MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
+ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
+ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
+BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
+YyRIHN8wfdVoOw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Amazon Root CA 4 O=Amazon
+# Subject: CN=Amazon Root CA 4 O=Amazon
+# Label: "Amazon Root CA 4"
+# Serial: 143266989758080763974105200630763877849284878
+# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd
+# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be
+# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92
+-----BEGIN CERTIFICATE-----
+MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
+MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
+Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
+A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
+Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
+9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
+M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
+/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
+MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
+CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
+1KyLa2tJElMzrdfkviT8tQp21KW8EA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
+# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
+# Serial: 1
+# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49
+# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca
+# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16
+-----BEGIN CERTIFICATE-----
+MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx
+GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp
+bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w
+KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0
+BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy
+dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG
+EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll
+IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU
+QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT
+TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg
+LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7
+a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr
+LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr
+N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X
+YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/
+iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f
+AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH
+V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh
+AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf
+IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4
+lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c
+8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf
+lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD.
+# Label: "GDCA TrustAUTH R5 ROOT"
+# Serial: 9009899650740120186
+# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4
+# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4
+# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93
+-----BEGIN CERTIFICATE-----
+MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE
+BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ
+IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0
+MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV
+BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w
+HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF
+AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj
+Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj
+TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u
+KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj
+qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm
+MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12
+ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP
+zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk
+L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC
+jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA
+HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC
+AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg
+p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm
+DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5
+COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry
+L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf
+JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg
+IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io
+2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV
+09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ
+XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq
+T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe
+MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation
+# Label: "SSL.com Root Certification Authority RSA"
+# Serial: 8875640296558310041
+# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29
+# SHA1 Fingerprint: b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb
+# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69
+-----BEGIN CERTIFICATE-----
+MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE
+BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK
+DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz
+OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv
+bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN
+AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R
+xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX
+qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC
+C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3
+6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh
+/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF
+YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E
+JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc
+US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8
+ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm
++Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi
+M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV
+HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G
+A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV
+cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc
+Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs
+PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/
+q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0
+cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr
+a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I
+H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y
+K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu
+nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf
+oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY
+Ic2wBlX7Jz9TkHCpBB5XJ7k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com Root Certification Authority ECC"
+# Serial: 8495723813297216424
+# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e
+# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a
+# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65
+-----BEGIN CERTIFICATE-----
+MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz
+WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0
+b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS
+b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI
+7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg
+CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud
+EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD
+VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T
+kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+
+gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority RSA R2"
+# Serial: 6248227494352943350
+# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95
+# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a
+# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c
+-----BEGIN CERTIFICATE-----
+MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV
+BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE
+CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy
+dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy
+MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G
+A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD
+DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq
+M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf
+OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa
+4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9
+HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR
+aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA
+b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ
+Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV
+PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO
+pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu
+UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY
+MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV
+HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4
+9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW
+s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5
+Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg
+cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM
+79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz
+/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt
+ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm
+Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK
+QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ
+w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi
+S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07
+mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation
+# Label: "SSL.com EV Root Certification Authority ECC"
+# Serial: 3182246526754555285
+# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90
+# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d
+# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8
+-----BEGIN CERTIFICATE-----
+MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC
+VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T
+U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx
+NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv
+dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv
+bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA
+VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku
+WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX
+5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ
+ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg
+h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6
+# Label: "GlobalSign Root CA - R6"
+# Serial: 1417766617973444989252670301619537
+# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae
+# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1
+# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69
+-----BEGIN CERTIFICATE-----
+MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg
+MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx
+MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET
+MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI
+xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k
+ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD
+aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw
+LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw
+1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX
+k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2
+SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h
+bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n
+WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY
+rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce
+MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD
+AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu
+bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN
+nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt
+Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61
+55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj
+vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf
+cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz
+oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp
+nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs
+pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v
+JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R
+8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4
+5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed
+# Label: "OISTE WISeKey Global Root GC CA"
+# Serial: 44084345621038548146064804565436152554
+# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23
+# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31
+# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d
+-----BEGIN CERTIFICATE-----
+MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw
+CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91
+bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg
+Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ
+BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu
+ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS
+b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni
+eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W
+p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T
+rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV
+57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg
+Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Global G2 Root O=UniTrust
+# Subject: CN=UCA Global G2 Root O=UniTrust
+# Label: "UCA Global G2 Root"
+# Serial: 124779693093741543919145257850076631279
+# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8
+# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a
+# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH
+bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x
+CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds
+b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr
+b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9
+kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm
+VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R
+VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc
+C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj
+tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY
+D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv
+j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl
+NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6
+iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP
+O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/
+BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV
+ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj
+L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5
+1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl
+1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU
+b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV
+PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj
+y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb
+EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg
+DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI
++Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy
+YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX
+UB+K+wb1whnw0A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=UCA Extended Validation Root O=UniTrust
+# Subject: CN=UCA Extended Validation Root O=UniTrust
+# Label: "UCA Extended Validation Root"
+# Serial: 106100277556486529736699587978573607008
+# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2
+# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a
+# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH
+MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF
+eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx
+MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV
+BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog
+D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS
+sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop
+O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk
+sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi
+c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj
+VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz
+KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/
+TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G
+sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs
+1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD
+fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T
+AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN
+l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR
+ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ
+VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5
+c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp
+4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s
+t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj
+2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO
+vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C
+xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx
+cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM
+fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036
+# Label: "Certigna Root CA"
+# Serial: 269714418870597844693661054334862075617
+# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77
+# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43
+# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68
+-----BEGIN CERTIFICATE-----
+MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw
+WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw
+MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x
+MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD
+VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX
+BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw
+ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO
+ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M
+CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu
+I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm
+TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh
+C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf
+ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz
+IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT
+Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k
+JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5
+hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB
+GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of
+1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov
+L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo
+dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr
+aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq
+hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L
+6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG
+HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6
+0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB
+lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi
+o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1
+gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v
+faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63
+Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh
+jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw
+3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign Root CA - G1"
+# Serial: 235931866688319308814040
+# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac
+# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c
+# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67
+-----BEGIN CERTIFICATE-----
+MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD
+VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU
+ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH
+MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO
+MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv
+Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz
+f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO
+8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq
+d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM
+tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt
+Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB
+o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD
+AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x
+PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM
+wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d
+GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH
+6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby
+RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx
+iN66zB+Afko=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI
+# Label: "emSign ECC Root CA - G3"
+# Serial: 287880440101571086945156
+# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40
+# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1
+# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b
+-----BEGIN CERTIFICATE-----
+MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG
+EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo
+bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g
+RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ
+TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s
+b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0
+WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS
+fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB
+zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq
+hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB
+CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD
++JbNR6iC8hZVdyR+EhCVBCyj
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign Root CA - C1"
+# Serial: 825510296613316004955058
+# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68
+# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01
+# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f
+-----BEGIN CERTIFICATE-----
+MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG
+A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg
+SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v
+dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ
+BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ
+HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH
+3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH
+GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c
+xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1
+aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq
+TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL
+BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87
+/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4
+kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG
+YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT
++xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo
+WXzhriKi4gp6D/piq1JM4fHfyr6DDUI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI
+# Label: "emSign ECC Root CA - C3"
+# Serial: 582948710642506000014504
+# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5
+# SHA1 Fingerprint: b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66
+# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3
+-----BEGIN CERTIFICATE-----
+MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG
+EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx
+IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw
+MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln
+biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND
+IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci
+MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti
+sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O
+BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB
+Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c
+3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J
+0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post
+# Label: "Hongkong Post Root CA 3"
+# Serial: 46170865288971385588281144162979347873371282084
+# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0
+# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02
+# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6
+-----BEGIN CERTIFICATE-----
+MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL
+BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ
+SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n
+a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5
+NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT
+CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u
+Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK
+AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO
+dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI
+VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV
+9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY
+2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY
+vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt
+bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb
+x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+
+l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK
+TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj
+Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP
+BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e
+i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw
+DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG
+7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk
+MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr
+gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk
+GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS
+3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm
+Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+
+l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c
+JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP
+L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa
+LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG
+mpv0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only
+# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only
+# Label: "Entrust Root Certification Authority - G4"
+# Serial: 289383649854506086828220374796556676440
+# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88
+# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01
+# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88
+-----BEGIN CERTIFICATE-----
+MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw
+gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL
+Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg
+MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw
+BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0
+MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1
+c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ
+bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg
+Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B
+AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ
+2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E
+T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j
+5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM
+C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T
+DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX
+wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A
+2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm
+nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8
+dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl
+N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj
+c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS
+5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS
+Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr
+hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/
+B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI
+AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw
+H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+
+b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk
+2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol
+IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk
+5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY
+n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
+# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation
+# Label: "Microsoft ECC Root Certificate Authority 2017"
+# Serial: 136839042543790627607696632466672567020
+# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67
+# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5
+# SHA256 Fingerprint: 35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02
+-----BEGIN CERTIFICATE-----
+MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw
+CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD
+VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw
+MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV
+UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy
+b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq
+hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR
+ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb
+hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3
+FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV
+L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB
+iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation
+# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation
+# Label: "Microsoft RSA Root Certificate Authority 2017"
+# Serial: 40975477897264996090493496164228220339
+# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47
+# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74
+# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0
+-----BEGIN CERTIFICATE-----
+MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl
+MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw
+NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5
+IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG
+EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N
+aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ
+Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0
+ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1
+HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm
+gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ
+jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc
+aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG
+YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6
+W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K
+UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH
++FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q
+W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/
+BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC
+NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC
+LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC
+gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6
+tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh
+SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2
+TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3
+pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR
+xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp
+GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9
+dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN
+AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB
+RA+GsCyRxj3qrg+E
+-----END CERTIFICATE-----
+
+# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd.
+# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd.
+# Label: "e-Szigno Root CA 2017"
+# Serial: 411379200276854331539784714
+# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98
+# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1
+# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99
+-----BEGIN CERTIFICATE-----
+MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV
+BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk
+LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv
+b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ
+BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg
+THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v
+IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv
+xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H
+Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G
+A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB
+eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo
+jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ
++efcMQ==
+-----END CERTIFICATE-----
+
+# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2
+# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2
+# Label: "certSIGN Root CA G2"
+# Serial: 313609486401300475190
+# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7
+# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32
+# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05
+-----BEGIN CERTIFICATE-----
+MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV
+BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g
+Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ
+BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ
+R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF
+dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw
+vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ
+uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp
+n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs
+cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW
+xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P
+rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF
+DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx
+DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy
+LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C
+eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB
+/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ
+d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq
+kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC
+b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl
+qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0
+OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c
+NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk
+ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO
+pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj
+03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk
+PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE
+1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX
+QRBdJ3NghVdJIgc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc.
+# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc.
+# Label: "Trustwave Global Certification Authority"
+# Serial: 1846098327275375458322922162
+# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e
+# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5
+# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8
+-----BEGIN CERTIFICATE-----
+MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw
+CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x
+ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1
+c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx
+OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI
+SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI
+b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp
+Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
+ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn
+swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu
+7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8
+1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW
+80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP
+JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l
+RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw
+hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10
+coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc
+BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n
+twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud
+EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud
+DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W
+0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe
+uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q
+lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB
+aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE
+sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT
+MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe
+qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh
+VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8
+h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9
+EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK
+yeC2nOnOcXHebD8WpHk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc.
+# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc.
+# Label: "Trustwave Global ECC P256 Certification Authority"
+# Serial: 4151900041497450638097112925
+# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54
+# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf
+# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4
+-----BEGIN CERTIFICATE-----
+MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD
+VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf
+BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3
+YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x
+NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G
+A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0
+d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF
+Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG
+SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN
+FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w
+DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw
+CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh
+DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc.
+# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc.
+# Label: "Trustwave Global ECC P384 Certification Authority"
+# Serial: 2704997926503831671788816187
+# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6
+# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2
+# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97
+-----BEGIN CERTIFICATE-----
+MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD
+VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf
+BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3
+YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x
+NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G
+A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0
+d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF
+Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ
+j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF
+1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G
+A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3
+AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC
+MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu
+Sw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp.
+# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp.
+# Label: "NAVER Global Root Certification Authority"
+# Serial: 9013692873798656336226253319739695165984492813
+# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b
+# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1
+# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65
+-----BEGIN CERTIFICATE-----
+MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM
+BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG
+T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0
+aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx
+CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD
+b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA
+iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH
+38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE
+HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz
+kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP
+szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq
+vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf
+nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG
+YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo
+0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a
+CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K
+AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I
+36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB
+Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN
+qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj
+cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm
++LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL
+hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe
+lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7
+p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8
+piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR
+LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX
+5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO
+dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul
+9XXeifdy
+-----END CERTIFICATE-----
+
+# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres
+# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres
+# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS"
+# Serial: 131542671362353147877283741781055151509
+# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb
+# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a
+# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb
+-----BEGIN CERTIFICATE-----
+MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw
+CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw
+FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S
+Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5
+MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL
+DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS
+QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH
+sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK
+Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu
+SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC
+MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy
+v+c=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa
+# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa
+# Label: "GlobalSign Root R46"
+# Serial: 1552617688466950547958867513931858518042577
+# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef
+# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90
+# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9
+-----BEGIN CERTIFICATE-----
+MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA
+MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD
+VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy
+MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt
+c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ
+OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG
+vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud
+316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo
+0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE
+y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF
+zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE
++cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN
+I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs
+x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa
+ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC
+4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4
+7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg
+JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti
+2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk
+pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF
+FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt
+rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk
+ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5
+u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP
+4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6
+N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3
+vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa
+# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa
+# Label: "GlobalSign Root E46"
+# Serial: 1552617690338932563915843282459653771421763
+# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f
+# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84
+# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58
+-----BEGIN CERTIFICATE-----
+MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx
+CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD
+ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw
+MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex
+HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq
+R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd
+yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud
+DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ
+7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8
++RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
+# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz
+# Label: "ANF Secure Server Root CA"
+# Serial: 996390341000653745
+# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96
+# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74
+# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99
+-----BEGIN CERTIFICATE-----
+MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV
+BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk
+YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV
+BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN
+MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF
+UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD
+VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v
+dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj
+cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q
+yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH
+2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX
+H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL
+zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR
+p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz
+W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/
+SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn
+LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3
+n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B
+u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj
+o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC
+AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L
+9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej
+rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK
+pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0
+vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq
+OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ
+/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9
+2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI
++PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2
+MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo
+tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Label: "Certum EC-384 CA"
+# Serial: 160250656287871593594747141429395092468
+# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1
+# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed
+# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6
+-----BEGIN CERTIFICATE-----
+MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw
+CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw
+JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT
+EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0
+WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT
+LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX
+BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE
+KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm
+Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj
+QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8
+EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J
+UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn
+nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority
+# Label: "Certum Trusted Root CA"
+# Serial: 40870380103424195783807378461123655149
+# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29
+# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5
+# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd
+-----BEGIN CERTIFICATE-----
+MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6
+MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu
+MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV
+BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw
+MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg
+U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo
+b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG
+SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ
+n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q
+p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq
+NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF
+8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3
+HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa
+mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi
+7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF
+ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P
+qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ
+v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6
+Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1
+vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD
+ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4
+WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo
+zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR
+5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ
+GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf
+5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq
+0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D
+P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM
+qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP
+0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf
+E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb
+-----END CERTIFICATE-----
+
+# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique
+# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique
+# Label: "TunTrust Root CA"
+# Serial: 108534058042236574382096126452369648152337120275
+# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4
+# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb
+# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41
+-----BEGIN CERTIFICATE-----
+MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL
+BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg
+Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv
+b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG
+EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u
+IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ
+n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd
+2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF
+VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ
+GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF
+li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU
+r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2
+eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb
+MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg
+jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB
+7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW
+5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE
+ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0
+90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z
+xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu
+QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4
+FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH
+22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP
+xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn
+dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5
+Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b
+nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ
+CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH
+u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj
+d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o=
+-----END CERTIFICATE-----
+
+# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Label: "HARICA TLS RSA Root CA 2021"
+# Serial: 76817823531813593706434026085292783742
+# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91
+# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d
+# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d
+-----BEGIN CERTIFICATE-----
+MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs
+MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl
+c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg
+Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL
+MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl
+YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv
+b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l
+mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE
+4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv
+a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M
+pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw
+Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b
+LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY
+AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB
+AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq
+E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr
+W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ
+CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE
+AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU
+X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3
+f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja
+H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP
+JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P
+zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt
+jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0
+/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT
+BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79
+aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW
+xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU
+63ZTGI0RmLo=
+-----END CERTIFICATE-----
+
+# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA
+# Label: "HARICA TLS ECC Root CA 2021"
+# Serial: 137515985548005187474074462014555733966
+# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0
+# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48
+# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01
+-----BEGIN CERTIFICATE-----
+MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw
+CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh
+cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v
+dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG
+A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj
+aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg
+Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7
+KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y
+STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD
+AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw
+SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN
+nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps
+-----END CERTIFICATE-----
+
+# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Subject: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068
+# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068"
+# Serial: 1977337328857672817
+# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3
+# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe
+# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a
+-----BEGIN CERTIFICATE-----
+MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE
+BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h
+cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1
+MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg
+Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9
+thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM
+cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG
+L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i
+NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h
+X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b
+m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy
+Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja
+EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T
+KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF
+6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh
+OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc
+tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd
+IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j
+b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC
+AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw
+ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m
+iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF
+Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ
+hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P
+Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE
+EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV
+1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t
+CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR
+5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw
+f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9
+ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK
+GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV
+-----END CERTIFICATE-----
+
+# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd.
+# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd.
+# Label: "vTrus ECC Root CA"
+# Serial: 630369271402956006249506845124680065938238527194
+# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85
+# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1
+# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3
+-----BEGIN CERTIFICATE-----
+MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw
+RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY
+BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz
+MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u
+LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF
+K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0
+v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd
+e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD
+VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw
+V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA
+AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG
+GJTO
+-----END CERTIFICATE-----
+
+# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd.
+# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd.
+# Label: "vTrus Root CA"
+# Serial: 387574501246983434957692974888460947164905180485
+# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc
+# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7
+# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87
+-----BEGIN CERTIFICATE-----
+MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL
+BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x
+FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx
+MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s
+THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD
+ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc
+IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU
+AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+
+GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9
+8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH
+flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt
+J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim
+0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN
+pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ
+UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW
+OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB
+AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet
+8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd
+nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j
+bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM
+Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv
+TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS
+S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr
+I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9
+b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB
+UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P
+Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven
+sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=ISRG Root X2 O=Internet Security Research Group
+# Subject: CN=ISRG Root X2 O=Internet Security Research Group
+# Label: "ISRG Root X2"
+# Serial: 87493402998870891108772069816698636114
+# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5
+# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af
+# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70
+-----BEGIN CERTIFICATE-----
+MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw
+CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg
+R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00
+MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT
+ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw
+EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW
++1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9
+ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI
+zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW
+tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1
+/q4AaOeMSQ+2b1tbFfLn
+-----END CERTIFICATE-----
+
+# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd.
+# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd.
+# Label: "HiPKI Root CA - G1"
+# Serial: 60966262342023497858655262305426234976
+# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3
+# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60
+# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc
+-----BEGIN CERTIFICATE-----
+MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP
+MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0
+ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa
+Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3
+YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw
+qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv
+Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6
+lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz
+Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ
+KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK
+FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj
+HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr
+y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ
+/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM
+a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6
+fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV
+HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG
+SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi
+7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc
+SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza
+ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc
+XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg
+iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho
+L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF
+Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr
+kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+
+vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU
+YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4
+# Label: "GlobalSign ECC Root CA - R4"
+# Serial: 159662223612894884239637590694
+# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc
+# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28
+# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2
+-----BEGIN CERTIFICATE-----
+MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD
+VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh
+bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw
+MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g
+UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT
+BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx
+uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV
+HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/
++wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147
+bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R1 O=Google Trust Services LLC
+# Subject: CN=GTS Root R1 O=Google Trust Services LLC
+# Label: "GTS Root R1"
+# Serial: 159662320309726417404178440727
+# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40
+# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a
+# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf
+-----BEGIN CERTIFICATE-----
+MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo
+27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w
+Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw
+TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl
+qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH
+szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8
+Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk
+MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92
+wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p
+aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN
+VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID
+AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E
+FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb
+C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe
+QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy
+h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4
+7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J
+ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef
+MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/
+Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT
+6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ
+0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm
+2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb
+bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R2 O=Google Trust Services LLC
+# Subject: CN=GTS Root R2 O=Google Trust Services LLC
+# Label: "GTS Root R2"
+# Serial: 159662449406622349769042896298
+# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc
+# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94
+# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8
+-----BEGIN CERTIFICATE-----
+MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw
+CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU
+MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw
+MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp
+Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt
+nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY
+6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu
+MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k
+RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg
+f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV
++3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo
+dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW
+Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa
+G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq
+gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID
+AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E
+FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H
+vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8
+0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC
+B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u
+NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg
+yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev
+HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6
+xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR
+TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg
+JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV
+7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl
+6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R3 O=Google Trust Services LLC
+# Subject: CN=GTS Root R3 O=Google Trust Services LLC
+# Label: "GTS Root R3"
+# Serial: 159662495401136852707857743206
+# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73
+# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46
+# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48
+-----BEGIN CERTIFICATE-----
+MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD
+VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG
+A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw
+WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz
+IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi
+AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G
+jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2
+4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7
+VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm
+ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X
+-----END CERTIFICATE-----
+
+# Issuer: CN=GTS Root R4 O=Google Trust Services LLC
+# Subject: CN=GTS Root R4 O=Google Trust Services LLC
+# Label: "GTS Root R4"
+# Serial: 159662532700760215368942768210
+# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8
+# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47
+# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d
+-----BEGIN CERTIFICATE-----
+MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD
+VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG
+A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw
+WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz
+IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi
+AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi
+QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR
+HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D
+9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8
+p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD
+-----END CERTIFICATE-----
+
+# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj
+# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj
+# Label: "Telia Root CA v2"
+# Serial: 7288924052977061235122729490515358
+# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48
+# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd
+# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx
+CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE
+AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1
+NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ
+MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP
+ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq
+AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9
+vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9
+lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD
+n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT
+7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o
+6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC
+TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6
+WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R
+DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI
+pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj
+YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy
+rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw
+AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ
+8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi
+0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM
+A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS
+SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K
+TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF
+6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er
+3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt
+Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT
+VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW
+ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA
+rBPuUBQemMc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH
+# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH
+# Label: "D-TRUST BR Root CA 1 2020"
+# Serial: 165870826978392376648679885835942448534
+# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed
+# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67
+# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44
+-----BEGIN CERTIFICATE-----
+MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw
+CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS
+VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5
+NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG
+A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS
+zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0
+QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/
+VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g
+PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf
+Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l
+dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1
+c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO
+PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW
+wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV
+dWNbFJWcHwHP2NVypw87
+-----END CERTIFICATE-----
+
+# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH
+# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH
+# Label: "D-TRUST EV Root CA 1 2020"
+# Serial: 126288379621884218666039612629459926992
+# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e
+# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07
+# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db
+-----BEGIN CERTIFICATE-----
+MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw
+CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS
+VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5
+NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG
+A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB
+BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC
+/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD
+wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3
+OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g
+PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf
+Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l
+dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1
+c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO
+PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA
+y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb
+gfM0agPnIjhQW+0ZT0MW
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc.
+# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc.
+# Label: "DigiCert TLS ECC P384 Root G5"
+# Serial: 13129116028163249804115411775095713523
+# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed
+# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee
+# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05
+-----BEGIN CERTIFICATE-----
+MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp
+Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2
+MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ
+bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG
+ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS
+7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp
+0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS
+B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49
+BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ
+LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4
+DXZDjC5Ty3zfDBeWUA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc.
+# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc.
+# Label: "DigiCert TLS RSA4096 Root G5"
+# Serial: 11930366277458970227240571539258396554
+# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1
+# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35
+# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75
+-----BEGIN CERTIFICATE-----
+MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN
+MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT
+HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN
+NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs
+IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi
+MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+
+ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0
+2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp
+wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM
+pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD
+nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po
+sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx
+Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd
+Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX
+KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe
+XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL
+tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv
+TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN
+AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw
+GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H
+PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF
+O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ
+REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik
+AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv
+/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+
+p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw
+MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF
+qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK
+ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certainly Root R1 O=Certainly
+# Subject: CN=Certainly Root R1 O=Certainly
+# Label: "Certainly Root R1"
+# Serial: 188833316161142517227353805653483829216
+# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12
+# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af
+# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0
+-----BEGIN CERTIFICATE-----
+MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw
+PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy
+dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9
+MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0
+YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2
+1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT
+vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed
+aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0
+1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5
+r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5
+cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ
+wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ
+6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA
+2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH
+Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR
+eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB
+/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u
+d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr
+PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d
+8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi
+1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd
+rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di
+taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7
+lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj
+yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn
+Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy
+yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n
+wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6
+OV+KmalBWQewLK8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Certainly Root E1 O=Certainly
+# Subject: CN=Certainly Root E1 O=Certainly
+# Label: "Certainly Root E1"
+# Serial: 8168531406727139161245376702891150584
+# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9
+# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b
+# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2
+-----BEGIN CERTIFICATE-----
+MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw
+CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu
+bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ
+BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s
+eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK
++IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2
+QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4
+hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm
+ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG
+BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR
+-----END CERTIFICATE-----
+
+# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD.
+# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD.
+# Label: "Security Communication RootCA3"
+# Serial: 16247922307909811815
+# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26
+# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a
+# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94
+-----BEGIN CERTIFICATE-----
+MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV
+BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw
+JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2
+MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc
+U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg
+Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC
+CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r
+CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA
+lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG
+TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7
+9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7
+8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4
+g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we
+GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst
++3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M
+0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ
+T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw
+HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP
+BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS
+YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA
+FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd
+9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI
+UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+
+OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke
+gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf
+iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV
+nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD
+2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI//
+1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad
+TdJ0MN1kURXbg4NR16/9M51NZg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD.
+# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD.
+# Label: "Security Communication ECC RootCA1"
+# Serial: 15446673492073852651
+# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86
+# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41
+# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11
+-----BEGIN CERTIFICATE-----
+MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT
+AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD
+VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx
+NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT
+HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5
+IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi
+AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl
+dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK
+ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E
+BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu
+9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O
+be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k=
+-----END CERTIFICATE-----
+
+# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY
+# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY
+# Label: "BJCA Global Root CA1"
+# Serial: 113562791157148395269083148143378328608
+# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90
+# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a
+# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae
+-----BEGIN CERTIFICATE-----
+MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU
+MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI
+T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz
+MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF
+SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh
+bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z
+xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ
+spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5
+58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR
+at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll
+5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq
+nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK
+V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/
+pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO
+z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn
+jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+
+WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF
+7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
+AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4
+YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli
+awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u
++2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88
+X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN
+SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo
+P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI
++pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz
+znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9
+eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2
+YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy
+r/6zcCwupvI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY
+# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY
+# Label: "BJCA Global Root CA2"
+# Serial: 58605626836079930195615843123109055211
+# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c
+# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6
+# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82
+-----BEGIN CERTIFICATE-----
+MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw
+CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ
+VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy
+MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ
+TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS
+b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B
+IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+
++kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK
+sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA
+94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B
+43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited
+# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited
+# Label: "Sectigo Public Server Authentication Root E46"
+# Serial: 88989738453351742415770396670917916916
+# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01
+# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a
+# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83
+-----BEGIN CERTIFICATE-----
+MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw
+CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T
+ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN
+MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG
+A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT
+ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA
+IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC
+WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+
+6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B
+Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa
+qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q
+4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited
+# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited
+# Label: "Sectigo Public Server Authentication Root R46"
+# Serial: 156256931880233212765902055439220583700
+# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5
+# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38
+# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06
+-----BEGIN CERTIFICATE-----
+MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf
+MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD
+Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw
+HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY
+MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp
+YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB
+AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa
+ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz
+SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf
+iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X
+ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3
+IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS
+VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE
+SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu
++Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt
+8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L
+HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt
+zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P
+AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c
+mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ
+YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52
+gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA
+Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB
+JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX
+DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui
+TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5
+dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65
+LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp
+0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY
+QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation
+# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation
+# Label: "SSL.com TLS RSA Root CA 2022"
+# Serial: 148535279242832292258835760425842727825
+# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da
+# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca
+# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed
+-----BEGIN CERTIFICATE-----
+MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO
+MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD
+DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX
+DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw
+b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC
+AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP
+L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY
+t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins
+S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3
+PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO
+L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3
+R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w
+dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS
++YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS
+d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG
+AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f
+gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j
+BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z
+NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt
+hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM
+QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf
+R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ
+DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW
+P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy
+lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq
+bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w
+AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q
+r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji
+Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU
+98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation
+# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation
+# Label: "SSL.com TLS ECC Root CA 2022"
+# Serial: 26605119622390491762507526719404364228
+# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5
+# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39
+# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43
+-----BEGIN CERTIFICATE-----
+MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw
+CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT
+U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2
+MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh
+dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG
+ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm
+acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN
+SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME
+GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW
+uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp
+15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN
+b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos
+# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos
+# Label: "Atos TrustedRoot Root CA ECC TLS 2021"
+# Serial: 81873346711060652204712539181482831616
+# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8
+# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd
+# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8
+-----BEGIN CERTIFICATE-----
+MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w
+LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w
+CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0
+MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF
+Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI
+zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X
+tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4
+AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2
+KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD
+aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu
+CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo
+9H1/IISpQuQo
+-----END CERTIFICATE-----
+
+# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos
+# Subject: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos
+# Label: "Atos TrustedRoot Root CA RSA TLS 2021"
+# Serial: 111436099570196163832749341232207667876
+# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2
+# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48
+# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f
+-----BEGIN CERTIFICATE-----
+MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM
+MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx
+MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00
+MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD
+QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN
+BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z
+4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv
+Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ
+kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs
+GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln
+nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh
+3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD
+0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy
+geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8
+ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB
+c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI
+pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU
+dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB
+DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS
+4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs
+o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ
+qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw
+xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM
+rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4
+AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR
+0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY
+o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5
+dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE
+oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc.
+# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc.
+# Label: "TrustAsia Global Root CA G3"
+# Serial: 576386314500428537169965010905813481816650257167
+# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04
+# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7
+# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08
+-----BEGIN CERTIFICATE-----
+MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM
+BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp
+ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe
+Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw
+IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU
+cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC
+DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS
+T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK
+AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1
+nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep
+qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA
+yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs
+hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX
+zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv
+kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT
+f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA
+uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB
+o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih
+MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E
+BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4
+wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2
+XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1
+JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j
+ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV
+VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx
+xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on
+AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d
+7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj
+gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV
++Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo
+FGWsJwt0ivKH
+-----END CERTIFICATE-----
+
+# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc.
+# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc.
+# Label: "TrustAsia Global Root CA G4"
+# Serial: 451799571007117016466790293371524403291602933463
+# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb
+# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a
+# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c
+-----BEGIN CERTIFICATE-----
+MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw
+WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs
+IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y
+MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD
+VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz
+dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx
+s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw
+LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij
+YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD
+pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE
+AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR
+UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj
+/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope
+# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope
+# Label: "CommScope Public Trust ECC Root-01"
+# Serial: 385011430473757362783587124273108818652468453534
+# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16
+# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d
+# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b
+-----BEGIN CERTIFICATE-----
+MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw
+TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t
+bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa
+Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv
+cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C
+flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE
+hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq
+hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg
+2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS
+Um9poIyNStDuiw7LR47QjRE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope
+# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope
+# Label: "CommScope Public Trust ECC Root-02"
+# Serial: 234015080301808452132356021271193974922492992893
+# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2
+# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5
+# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a
+-----BEGIN CERTIFICATE-----
+MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw
+TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t
+bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa
+Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv
+cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw
+djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL
+j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU
+v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD
+VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq
+hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n
+ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV
+mkzw5l4lIhVtwodZ0LKOag==
+-----END CERTIFICATE-----
+
+# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope
+# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope
+# Label: "CommScope Public Trust RSA Root-01"
+# Serial: 354030733275608256394402989253558293562031411421
+# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8
+# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93
+# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL
+BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi
+Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1
+NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t
+U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt
+MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk
+YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh
+suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al
+DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj
+WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl
+P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547
+KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p
+UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/
+kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO
+Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB
+Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U
+CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ
+KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6
+NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ
+nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+
+QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v
+trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a
+aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD
+j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4
+Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w
+lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn
+YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc
+icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw
+-----END CERTIFICATE-----
+
+# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope
+# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope
+# Label: "CommScope Public Trust RSA Root-02"
+# Serial: 480062499834624527752716769107743131258796508494
+# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa
+# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae
+# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL
+BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi
+Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2
+NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t
+U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt
+MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE
+NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0
+kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C
+rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz
+hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2
+LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs
+n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku
+FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5
+kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3
+wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v
+wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs
+5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ
+KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB
+KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3
++VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme
+APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq
+pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT
+6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF
+sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt
+PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d
+lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670
+v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O
+rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7
+-----END CERTIFICATE-----
+
+# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH
+# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH
+# Label: "Telekom Security TLS ECC Root 2020"
+# Serial: 72082518505882327255703894282316633856
+# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd
+# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec
+# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1
+-----BEGIN CERTIFICATE-----
+MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw
+CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH
+bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw
+MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx
+JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE
+AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49
+AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O
+tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP
+f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA
+MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di
+z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn
+27iQ7t0l
+-----END CERTIFICATE-----
+
+# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH
+# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH
+# Label: "Telekom Security TLS RSA Root 2023"
+# Serial: 44676229530606711399881795178081572759
+# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2
+# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93
+# SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46
+-----BEGIN CERTIFICATE-----
+MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj
+MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0
+eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy
+MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC
+REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG
+A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ
+KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9
+cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV
+cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA
+U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6
+Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug
+BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy
+8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J
+co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg
+8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8
+rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12
+mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg
++y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX
+gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2
+p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ
+pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm
+9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw
+M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd
+GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+
+CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t
+xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+
+w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK
+L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj
+X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q
+ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm
+dTdmQRCsu/WU48IxK63nI1bMNSWSs1A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA
+# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA
+# Label: "FIRMAPROFESIONAL CA ROOT-A WEB"
+# Serial: 65916896770016886708751106294915943533
+# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3
+# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5
+# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a
+-----BEGIN CERTIFICATE-----
+MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw
+CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE
+YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB
+IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw
+CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE
+YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB
+IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf
+e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C
+cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O
+BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO
+PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw
+hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG
+XSaQpYXFuXqUPoeovQA=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA
+# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA
+# Label: "TWCA CYBER Root CA"
+# Serial: 85076849864375384482682434040119489222
+# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51
+# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66
+# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58
+-----BEGIN CERTIFICATE-----
+MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ
+MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290
+IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5
+WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO
+LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg
+Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P
+40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF
+avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/
+34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i
+JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu
+j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf
+Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP
+2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA
+S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA
+oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC
+kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW
+5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd
+BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB
+AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t
+tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn
+68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn
+TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t
+RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx
+f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI
+Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz
+8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4
+NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX
+xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6
+t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd.
+# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd.
+# Label: "SecureSign Root CA12"
+# Serial: 587887345431707215246142177076162061960426065942
+# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8
+# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4
+# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e
+-----BEGIN CERTIFICATE-----
+MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL
+BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u
+LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw
+NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD
+eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS
+b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF
+KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt
+p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd
+J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur
+FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J
+hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K
+h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD
+AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF
+AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld
+mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ
+mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA
+8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV
+55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/
+yOPiZwud9AzqVN/Ssq+xIvEg37xEHA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd.
+# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd.
+# Label: "SecureSign Root CA14"
+# Serial: 575790784512929437950770173562378038616896959179
+# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5
+# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f
+# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38
+-----BEGIN CERTIFICATE-----
+MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM
+BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u
+LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw
+NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD
+eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS
+b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/
+FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg
+vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy
+6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo
+/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J
+kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ
+0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib
+y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac
+18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs
+0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB
+SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL
+ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD
+AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk
+86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E
+rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib
+ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT
+zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS
+DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4
+2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo
+FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy
+K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6
+dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl
+Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB
+365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c
+JRNItX+S
+-----END CERTIFICATE-----
+
+# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd.
+# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd.
+# Label: "SecureSign Root CA15"
+# Serial: 126083514594751269499665114766174399806381178503
+# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47
+# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d
+# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a
+-----BEGIN CERTIFICATE-----
+MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw
+UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM
+dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy
+NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl
+cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290
+IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4
+wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR
+ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT
+9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp
+4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6
+bkU6iYAZezKYVWOr62Nuk22rGwlgMU4=
+-----END CERTIFICATE-----
diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi/core.py b/lambdas/aws-dd-forwarder-3.127.0/certifi/core.py
new file mode 100644
index 0000000..91f538b
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/certifi/core.py
@@ -0,0 +1,114 @@
+"""
+certifi.py
+~~~~~~~~~~
+
+This module returns the installation location of cacert.pem or its contents.
+"""
+import sys
+import atexit
+
+def exit_cacert_ctx() -> None:
+ _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr]
+
+
+if sys.version_info >= (3, 11):
+
+ from importlib.resources import as_file, files
+
+ _CACERT_CTX = None
+ _CACERT_PATH = None
+
+ def where() -> str:
+ # This is slightly terrible, but we want to delay extracting the file
+ # in cases where we're inside of a zipimport situation until someone
+ # actually calls where(), but we don't want to re-extract the file
+ # on every call of where(), so we'll do it once then store it in a
+ # global variable.
+ global _CACERT_CTX
+ global _CACERT_PATH
+ if _CACERT_PATH is None:
+ # This is slightly janky, the importlib.resources API wants you to
+ # manage the cleanup of this file, so it doesn't actually return a
+ # path, it returns a context manager that will give you the path
+ # when you enter it and will do any cleanup when you leave it. In
+ # the common case of not needing a temporary file, it will just
+ # return the file system location and the __exit__() is a no-op.
+ #
+ # We also have to hold onto the actual context manager, because
+ # it will do the cleanup whenever it gets garbage collected, so
+ # we will also store that at the global level as well.
+ _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem"))
+ _CACERT_PATH = str(_CACERT_CTX.__enter__())
+ atexit.register(exit_cacert_ctx)
+
+ return _CACERT_PATH
+
+ def contents() -> str:
+ return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii")
+
+elif sys.version_info >= (3, 7):
+
+ from importlib.resources import path as get_path, read_text
+
+ _CACERT_CTX = None
+ _CACERT_PATH = None
+
+ def where() -> str:
+ # This is slightly terrible, but we want to delay extracting the
+ # file in cases where we're inside of a zipimport situation until
+ # someone actually calls where(), but we don't want to re-extract
+ # the file on every call of where(), so we'll do it once then store
+ # it in a global variable.
+ global _CACERT_CTX
+ global _CACERT_PATH
+ if _CACERT_PATH is None:
+ # This is slightly janky, the importlib.resources API wants you
+ # to manage the cleanup of this file, so it doesn't actually
+ # return a path, it returns a context manager that will give
+ # you the path when you enter it and will do any cleanup when
+ # you leave it. In the common case of not needing a temporary
+ # file, it will just return the file system location and the
+ # __exit__() is a no-op.
+ #
+ # We also have to hold onto the actual context manager, because
+ # it will do the cleanup whenever it gets garbage collected, so
+ # we will also store that at the global level as well.
+ _CACERT_CTX = get_path("certifi", "cacert.pem")
+ _CACERT_PATH = str(_CACERT_CTX.__enter__())
+ atexit.register(exit_cacert_ctx)
+
+ return _CACERT_PATH
+
+ def contents() -> str:
+ return read_text("certifi", "cacert.pem", encoding="ascii")
+
+else:
+ import os
+ import types
+ from typing import Union
+
+ Package = Union[types.ModuleType, str]
+ Resource = Union[str, "os.PathLike"]
+
+    # This fallback will work for Python versions prior to 3.7 that lack the
+    # importlib.resources module. It relies on the existing `where` function,
+    # so it won't address issues with environments like PyOxidizer that don't
+    # set __file__ on modules.
+ def read_text(
+ package: Package,
+ resource: Resource,
+ encoding: str = 'utf-8',
+ errors: str = 'strict'
+ ) -> str:
+ with open(where(), encoding=encoding) as data:
+ return data.read()
+
+ # If we don't have importlib.resources, then we will just do the old logic
+ # of assuming we're on the filesystem and munge the path directly.
+ def where() -> str:
+ f = os.path.dirname(__file__)
+
+ return os.path.join(f, "cacert.pem")
+
+ def contents() -> str:
+ return read_text("certifi", "cacert.pem", encoding="ascii")
diff --git a/lambdas/aws-dd-forwarder-3.127.0/certifi/py.typed b/lambdas/aws-dd-forwarder-3.127.0/certifi/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/LICENSE b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/LICENSE
new file mode 100644
index 0000000..ad82355
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 TAHRI Ahmed R.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/METADATA
new file mode 100644
index 0000000..b19096b
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/METADATA
@@ -0,0 +1,695 @@
+Metadata-Version: 2.1
+Name: charset-normalizer
+Version: 3.4.0
+Summary: The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet.
+Home-page: https://github.com/Ousret/charset_normalizer
+Author: Ahmed TAHRI
+Author-email: tahri.ahmed@proton.me
+License: MIT
+Project-URL: Bug Reports, https://github.com/Ousret/charset_normalizer/issues
+Project-URL: Documentation, https://charset-normalizer.readthedocs.io/en/latest
+Keywords: encoding,charset,charset-detector,detector,normalization,unicode,chardet,detect
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Intended Audience :: Developers
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Text Processing :: Linguistic
+Classifier: Topic :: Utilities
+Classifier: Typing :: Typed
+Requires-Python: >=3.7.0
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Provides-Extra: unicode_backport
+
+Charset Detection, for Everyone 👋
+
+The Real First Universal Charset Detector
+
+> A library that helps you read text from an unknown charset encoding. Motivated by `chardet`,
+> I'm trying to resolve the issue by taking a new approach.
+> All IANA character set names for which the Python core library provides codecs are supported.
+
+
+ >>>>> 👉 Try Me Online Now, Then Adopt Me 👈 <<<<<
+
+
+This project offers you an alternative to **Universal Charset Encoding Detector**, also known as **Chardet**.
+
+| Feature | [Chardet](https://github.com/chardet/chardet) | Charset Normalizer | [cChardet](https://github.com/PyYoshi/cChardet) |
+|--------------------------------------------------|:---------------------------------------------:|:--------------------------------------------------------------------------------------------------:|:-----------------------------------------------:|
+| `Fast` | ❌ | ✅ | ✅ |
+| `Universal**` | ❌ | ✅ | ❌ |
+| `Reliable` **without** distinguishable standards | ❌ | ✅ | ✅ |
+| `Reliable` **with** distinguishable standards | ✅ | ✅ | ✅ |
+| `License` | LGPL-2.1 _restrictive_ | MIT | MPL-1.1 _restrictive_ |
+| `Native Python` | ✅ | ✅ | ❌ |
+| `Detect spoken language` | ❌ | ✅ | N/A |
+| `UnicodeDecodeError Safety` | ❌ | ✅ | ❌ |
+| `Whl Size (min)` | 193.6 kB | 42 kB | ~200 kB |
+| `Supported Encoding` | 33 | 🎉 [99](https://charset-normalizer.readthedocs.io/en/latest/user/support.html#supported-encodings) | 40 |
+
+
+
+
+
+*\*\* : the others clearly use encoding-specific code, even if it covers most of the encodings in common use*
+
+Did you get here because of the logs? See [https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html](https://charset-normalizer.readthedocs.io/en/latest/user/miscellaneous.html)
+
+## ⚡ Performance
+
+This package offers better performance than its counterpart, Chardet. Here are some numbers.
+
+| Package | Accuracy | Mean per file (ms) | File per sec (est) |
+|-----------------------------------------------|:--------:|:------------------:|:------------------:|
+| [chardet](https://github.com/chardet/chardet) | 86 % | 200 ms | 5 file/sec |
+| charset-normalizer | **98 %** | **10 ms** | 100 file/sec |
+
+| Package | 99th percentile | 95th percentile | 50th percentile |
+|-----------------------------------------------|:---------------:|:---------------:|:---------------:|
+| [chardet](https://github.com/chardet/chardet) | 1200 ms | 287 ms | 23 ms |
+| charset-normalizer | 100 ms | 50 ms | 5 ms |
+
+Chardet's performance on larger files (1MB+) is very poor. Expect a huge difference on large payloads.
+
+> Stats are generated using 400+ files with default parameters. For more details on the files used, see the GHA workflows.
+> And yes, these results might change at any time. The dataset can be updated to include more files.
+> The actual delays depend heavily on your CPU capabilities. The factors should remain the same.
+> Keep in mind that these stats are generous: Chardet's accuracy vs. ours is measured using Chardet's initial capability
+> (e.g. supported encodings). Challenge them if you want.
+
+## ✨ Installation
+
+Using pip:
+
+```sh
+pip install charset-normalizer -U
+```
+
+## 🚀 Basic Usage
+
+### CLI
+This package comes with a CLI.
+
+```
+usage: normalizer [-h] [-v] [-a] [-n] [-m] [-r] [-f] [-t THRESHOLD]
+ file [file ...]
+
+The Real First Universal Charset Detector. Discover originating encoding used
+on text file. Normalize text to unicode.
+
+positional arguments:
+ files File(s) to be analysed
+
+optional arguments:
+ -h, --help show this help message and exit
+ -v, --verbose Display complementary information about file if any.
+ Stdout will contain logs about the detection process.
+ -a, --with-alternative
+ Output complementary possibilities if any. Top-level
+ JSON WILL be a list.
+ -n, --normalize Permit to normalize input file. If not set, program
+ does not write anything.
+ -m, --minimal Only output the charset detected to STDOUT. Disabling
+ JSON output.
+ -r, --replace Replace file when trying to normalize it instead of
+ creating a new one.
+ -f, --force Replace file without asking if you are sure, use this
+ flag with caution.
+ -t THRESHOLD, --threshold THRESHOLD
+ Define a custom maximum amount of chaos allowed in
+ decoded content. 0. <= chaos <= 1.
+ --version Show version information and exit.
+```
+
+```bash
+normalizer ./data/sample.1.fr.srt
+```
+
+or
+
+```bash
+python -m charset_normalizer ./data/sample.1.fr.srt
+```
+
+🎉 Since version 1.4.0 the CLI produces easily usable stdout results in JSON format.
+
+```json
+{
+ "path": "/home/default/projects/charset_normalizer/data/sample.1.fr.srt",
+ "encoding": "cp1252",
+ "encoding_aliases": [
+ "1252",
+ "windows_1252"
+ ],
+ "alternative_encodings": [
+ "cp1254",
+ "cp1256",
+ "cp1258",
+ "iso8859_14",
+ "iso8859_15",
+ "iso8859_16",
+ "iso8859_3",
+ "iso8859_9",
+ "latin_1",
+ "mbcs"
+ ],
+ "language": "French",
+ "alphabets": [
+ "Basic Latin",
+ "Latin-1 Supplement"
+ ],
+ "has_sig_or_bom": false,
+ "chaos": 0.149,
+ "coherence": 97.152,
+ "unicode_path": null,
+ "is_preferred": true
+}
+```
+
+### Python
+*Just print out normalized text*
+```python
+from charset_normalizer import from_path
+
+results = from_path('./my_subtitle.srt')
+
+print(str(results.best()))
+```
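+
+The `best()` call returns a `CharsetMatch` (or `None` when nothing fits). A small sketch of inspecting the returned match; the `encoding` and `language` attributes mirror the JSON fields shown earlier:
+
+```python
+from charset_normalizer import from_path
+
+best_guess = from_path('./my_subtitle.srt').best()
+
+if best_guess is not None:
+    print(best_guess.encoding)  # e.g. 'cp1252'
+    print(best_guess.language)  # e.g. 'French'
+```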
+
+*Upgrade your code without effort*
+```python
+from charset_normalizer import detect
+```
+
+The above import behaves the same as **chardet**. We ensure that we offer the best (reasonable) backwards-compatible result possible.
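+
+As a minimal drop-in sketch (the legacy `detect` returns a dict with `encoding`, `language` and `confidence` keys, mirroring chardet's output):
+
+```python
+from charset_normalizer import detect
+
+result = detect('Bсеки човек има право на образование.'.encode('utf_8'))
+print(result['encoding'])  # most likely reported as 'utf-8'
+```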
+
+See the docs for advanced usage : [readthedocs.io](https://charset-normalizer.readthedocs.io/en/latest/)
+
+## 😇 Why
+
+When I started using Chardet, I noticed that it did not meet my expectations, and I wanted to propose a
+reliable alternative using a completely different method. Also, I never back down from a good challenge!
+
+I **don't care** about the **originating charset** encoding, because **two different tables** can
+produce **two identical rendered strings.**
+What I want is to get readable text, the best I can.
+
+In a way, **I'm brute-forcing text decoding.** How cool is that? 😎
+
+Don't confuse the **ftfy** package with charset-normalizer or chardet. ftfy's goal is to repair broken Unicode strings, whereas charset-normalizer converts a raw file in an unknown encoding to Unicode.
+
+## 🍰 How
+
+ - Discard all charset encoding tables that could not fit the binary content.
+ - Measure the noise, or mess, once the content is opened (in chunks) with a candidate charset encoding.
+ - Extract the matches with the lowest mess detected.
+ - Additionally, we measure coherence / probe for a language.
+
+**Wait a minute**, what is noise/mess and coherence according to **YOU?**
+
+*Noise:* I opened hundreds of text files, **written by humans**, with the wrong encoding table. **I observed**, then
+**established** some ground rules about **what is obvious** when **it looks like** a mess.
+I know that my interpretation of noise is probably incomplete; feel free to contribute in order to
+improve or rewrite it.
+
+*Coherence:* For each language on Earth, we have computed ranked letter-frequency tables (as best we can), and I figured
+that intel is worth something here. So I use those records against the decoded text to check whether I can detect intelligent design.
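+
+To make that pipeline concrete, here is a minimal, self-contained sketch of the brute-force idea: decode with candidate tables, discard the ones that cannot fit, and keep the least noisy result. It only illustrates the approach; it is not the library's actual scorer.
+
+```python
+import unicodedata
+
+
+def noise_score(text: str) -> float:
+    """Fraction of characters in control/unassigned categories: a crude mess measure."""
+    if not text:
+        return 0.0
+    bad = sum(
+        1
+        for ch in text
+        if unicodedata.category(ch).startswith("C") and ch not in "\t\n\r"
+    )
+    return bad / len(text)
+
+
+def naive_guess(payload: bytes, candidates=("utf_8", "cp1252", "latin_1")) -> str:
+    scored = []
+    for codec in candidates:
+        try:
+            decoded = payload.decode(codec)
+        except UnicodeDecodeError:
+            continue  # this table cannot fit the binary content: discard it
+        scored.append((noise_score(decoded), codec))
+    # keep the candidate with the lowest mess detected
+    return min(scored)[1]
+
+
+# naive_guess('œuf'.encode('cp1252')) -> 'cp1252' (utf_8 fails; latin_1 yields control chars)
+```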
+
+## ⚡ Known limitations
+
+ - Language detection is unreliable when the text contains two or more languages sharing identical letters (e.g. HTML with English tags plus Turkish content, both sharing Latin characters).
+ - Every charset detector heavily depends on sufficient content. In common cases, do not bother running detection on very tiny content.
+
+## ⚠️ About Python EOLs
+
+**If you are running:**
+
+- Python >=2.7,<3.5: Unsupported
+- Python 3.5: charset-normalizer < 2.1
+- Python 3.6: charset-normalizer < 3.1
+- Python 3.7: charset-normalizer < 4.0
+
+Upgrade your Python interpreter as soon as possible.
+
+## 👤 Contributing
+
+Contributions, issues and feature requests are very much welcome.
+Feel free to check [issues page](https://github.com/ousret/charset_normalizer/issues) if you want to contribute.
+
+## 📝 License
+
+Copyright © [Ahmed TAHRI @Ousret](https://github.com/Ousret).
+This project is [MIT](https://github.com/Ousret/charset_normalizer/blob/master/LICENSE) licensed.
+
+Character frequencies used in this project © 2012 [Denny Vrandečić](http://simia.net/letters/)
+
+## 💼 For Enterprise
+
+Professional support for charset-normalizer is available as part of the [Tidelift
+Subscription][1]. Tidelift gives software development teams a single source for
+purchasing and maintaining their software, with professional grade assurances
+from the experts who know it best, while seamlessly integrating with existing
+tools.
+
+[1]: https://tidelift.com/subscription/pkg/pypi-charset-normalizer?utm_source=pypi-charset-normalizer&utm_medium=readme
+
+# Changelog
+All notable changes to charset-normalizer will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
+
+## [3.4.0](https://github.com/Ousret/charset_normalizer/compare/3.3.2...3.4.0) (2024-10-08)
+
+### Added
+- Argument `--no-preemptive` in the CLI to prevent the detector from searching for hints.
+- Support for Python 3.13 (#512)
+
+### Fixed
+- Relax the TypeError exception thrown when trying to compare a CharsetMatch with anything other than a CharsetMatch.
+- Improved the general reliability of the detector based on user feedback. (#520) (#509) (#498) (#407) (#537)
+- The charset declared in the content (preemptive detection) was not changed when converting to utf-8 bytes. (#381)
+
+## [3.3.2](https://github.com/Ousret/charset_normalizer/compare/3.3.1...3.3.2) (2023-10-31)
+
+### Fixed
+- Unintentional memory usage regression when using large payloads that match several encodings (#376)
+- Regression on some detection cases showcased in the documentation (#371)
+
+### Added
+- Noise (md) probe that identifies malformed Arabic representations due to the presence of letters in isolated form (credit to my wife)
+
+## [3.3.1](https://github.com/Ousret/charset_normalizer/compare/3.3.0...3.3.1) (2023-10-22)
+
+### Changed
+- Optional mypyc compilation upgraded to version 1.6.1 for Python >= 3.8
+- Improved the general detection reliability based on reports from the community
+
+## [3.3.0](https://github.com/Ousret/charset_normalizer/compare/3.2.0...3.3.0) (2023-09-30)
+
+### Added
+- Allow executing the CLI (e.g. normalizer) through `python -m charset_normalizer.cli` or `python -m charset_normalizer`
+- Support for 9 forgotten encodings that are supported by Python but unlisted in `encodings.aliases` as they have no alias (#323)
+
+### Removed
+- (internal) Redundant utils.is_ascii function and unused function is_private_use_only
+- (internal) charset_normalizer.assets is moved inside charset_normalizer.constant
+
+### Changed
+- (internal) Unicode code blocks in constants are updated using the latest v15.0.0 definition to improve detection
+- Optional mypyc compilation upgraded to version 1.5.1 for Python >= 3.8
+
+### Fixed
+- Unable to properly sort CharsetMatch when both chaos/noise and coherence were close due to an unreachable condition in \_\_lt\_\_ (#350)
+
+## [3.2.0](https://github.com/Ousret/charset_normalizer/compare/3.1.0...3.2.0) (2023-06-07)
+
+### Changed
+- The type hint for function `from_path` no longer enforces `PathLike` as its first argument
+- Minor improvement over the global detection reliability
+
+### Added
+- Introduce function `is_binary` that relies on the main capabilities, optimized to detect binaries
+- Propagate the `enable_fallback` argument throughout `from_bytes`, `from_path`, and `from_fp`, allowing deeper control over the detection (default True)
+- Explicit support for Python 3.12
+
+### Fixed
+- Edge case detection failure where a file would contain 'very-long' camel cased word (Issue #289)
+
+## [3.1.0](https://github.com/Ousret/charset_normalizer/compare/3.0.1...3.1.0) (2023-03-06)
+
+### Added
+- Argument `should_rename_legacy` for the legacy function `detect`, which now disregards any new arguments without errors (PR #262)
+
+### Removed
+- Support for Python 3.6 (PR #260)
+
+### Changed
+- Optional speedup provided by mypy/c 1.0.1
+
+## [3.0.1](https://github.com/Ousret/charset_normalizer/compare/3.0.0...3.0.1) (2022-11-18)
+
+### Fixed
+- Multi-byte cutter/chunk generator did not always cut correctly (PR #233)
+
+### Changed
+- Speedup provided by mypy/c 0.990 on Python >= 3.7
+
+## [3.0.0](https://github.com/Ousret/charset_normalizer/compare/2.1.1...3.0.0) (2022-10-20)
+
+### Added
+- Extend the capability of explain=True when cp_isolation contains at most two entries (min. one): detailed Mess-detector results will be logged
+- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES
+- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio
+- `normalizer --version` now specifies whether the current version provides an extra speedup (meaning a mypyc-compiled wheel)
+
+### Changed
+- Build with static metadata using 'build' frontend
+- Make the language detection stricter
+- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup of up to 4x over v2.1
+
+### Fixed
+- The CLI with opt --normalize failed when using a full path for files
+- TooManyAccentuatedPlugin induced false positives in the mess detection when too few alpha characters had been fed to it
+- Sphinx warnings when generating the documentation
+
+### Removed
+- The coherence detector no longer returns 'Simple English'; it returns 'English' instead
+- The coherence detector no longer returns 'Classical Chinese'; it returns 'Chinese' instead
+- Breaking: Method `first()` and `best()` from CharsetMatch
+- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (it is unreliable and conflicts with ASCII)
+- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches
+- Breaking: Top-level function `normalize`
+- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch
+- Support for the backport `unicodedata2`
+
+## [3.0.0rc1](https://github.com/Ousret/charset_normalizer/compare/3.0.0b2...3.0.0rc1) (2022-10-18)
+
+### Added
+- Extend the capability of explain=True when cp_isolation contains at most two entries (min. one): detailed Mess-detector results will be logged
+- Support for alternative language frequency set in charset_normalizer.assets.FREQUENCIES
+- Add parameter `language_threshold` in `from_bytes`, `from_path` and `from_fp` to adjust the minimum expected coherence ratio
+
+### Changed
+- Build with static metadata using 'build' frontend
+- Make the language detection stricter
+
+### Fixed
+- The CLI with opt --normalize failed when using a full path for files
+- TooManyAccentuatedPlugin induced false positives in the mess detection when too few alpha characters had been fed to it
+
+### Removed
+- The coherence detector no longer returns 'Simple English'; it returns 'English' instead
+- The coherence detector no longer returns 'Classical Chinese'; it returns 'Chinese' instead
+
+## [3.0.0b2](https://github.com/Ousret/charset_normalizer/compare/3.0.0b1...3.0.0b2) (2022-08-21)
+
+### Added
+- `normalizer --version` now specifies whether the current version provides an extra speedup (meaning a mypyc-compiled wheel)
+
+### Removed
+- Breaking: Method `first()` and `best()` from CharsetMatch
+- UTF-7 will no longer appear as "detected" without a recognized SIG/mark (it is unreliable and conflicts with ASCII)
+
+### Fixed
+- Sphinx warnings when generating the documentation
+
+## [3.0.0b1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...3.0.0b1) (2022-08-15)
+
+### Changed
+- Optional: Module `md.py` can be compiled using Mypyc to provide an extra speedup of up to 4x over v2.1
+
+### Removed
+- Breaking: Class aliases CharsetDetector, CharsetDoctor, CharsetNormalizerMatch and CharsetNormalizerMatches
+- Breaking: Top-level function `normalize`
+- Breaking: Properties `chaos_secondary_pass`, `coherence_non_latin` and `w_counter` from CharsetMatch
+- Support for the backport `unicodedata2`
+
+## [2.1.1](https://github.com/Ousret/charset_normalizer/compare/2.1.0...2.1.1) (2022-08-19)
+
+### Deprecated
+- Function `normalize` scheduled for removal in 3.0
+
+### Changed
+- Removed useless call to decode in fn is_unprintable (#206)
+
+### Fixed
+- Third-party library (i18n xgettext) crashing not recognizing utf_8 (PEP 263) with underscore from [@aleksandernovikov](https://github.com/aleksandernovikov) (#204)
+
+## [2.1.0](https://github.com/Ousret/charset_normalizer/compare/2.0.12...2.1.0) (2022-06-19)
+
+### Added
+- Output the Unicode table version when running the CLI with `--version` (PR #194)
+
+### Changed
+- Re-use decoded buffer for single byte character sets from [@nijel](https://github.com/nijel) (PR #175)
+- Fixing some performance bottlenecks from [@deedy5](https://github.com/deedy5) (PR #183)
+
+### Fixed
+- Work around a potential bug in CPython where a Zero Width No-Break Space located in Arabic Presentation Forms-B (Unicode 1.1) was not acknowledged as a space (PR #175)
+- CLI default threshold aligned with the API threshold from [@oleksandr-kuzmenko](https://github.com/oleksandr-kuzmenko) (PR #181)
+
+### Removed
+- Support for Python 3.5 (PR #192)
+
+### Deprecated
+- Use of backport unicodedata from `unicodedata2` as Python is quickly catching up, scheduled for removal in 3.0 (PR #194)
+
+## [2.0.12](https://github.com/Ousret/charset_normalizer/compare/2.0.11...2.0.12) (2022-02-12)
+
+### Fixed
+- ASCII mis-detection in rare cases (PR #170)
+
+## [2.0.11](https://github.com/Ousret/charset_normalizer/compare/2.0.10...2.0.11) (2022-01-30)
+
+### Added
+- Explicit support for Python 3.11 (PR #164)
+
+### Changed
+- The logging behavior has been completely reviewed, now using only TRACE and DEBUG levels (PR #163 #165)
+
+## [2.0.10](https://github.com/Ousret/charset_normalizer/compare/2.0.9...2.0.10) (2022-01-04)
+
+### Fixed
+- Fallback match entries might lead to UnicodeDecodeError for large byte sequences (PR #154)
+
+### Changed
+- Skipping the language-detection (CD) on ASCII (PR #155)
+
+## [2.0.9](https://github.com/Ousret/charset_normalizer/compare/2.0.8...2.0.9) (2021-12-03)
+
+### Changed
+- Moderating the logging impact (since 2.0.8) for specific environments (PR #147)
+
+### Fixed
+- Wrong logging level applied when setting kwarg `explain` to True (PR #146)
+
+## [2.0.8](https://github.com/Ousret/charset_normalizer/compare/2.0.7...2.0.8) (2021-11-24)
+### Changed
+- Improvement over Vietnamese detection (PR #126)
+- MD improvement on trailing data and long foreign (non-pure latin) data (PR #124)
+- Efficiency improvements in cd/alphabet_languages from [@adbar](https://github.com/adbar) (PR #122)
+- call sum() without an intermediary list following PEP 289 recommendations from [@adbar](https://github.com/adbar) (PR #129)
+- Code style as refactored by Sourcery-AI (PR #131)
+- Minor adjustment on the MD around european words (PR #133)
+- Remove and replace SRTs from assets / tests (PR #139)
+- Initialize the library logger with a `NullHandler` by default from [@nmaynes](https://github.com/nmaynes) (PR #135)
+- Setting kwarg `explain` to True will add provisionally (bounded to function lifespan) a specific stream handler (PR #135)
+
+### Fixed
+- Fix large (misleading) sequence giving UnicodeDecodeError (PR #137)
+- Avoid using too insignificant chunk (PR #137)
+
+### Added
+- Add and expose function `set_logging_handler` to configure a specific StreamHandler from [@nmaynes](https://github.com/nmaynes) (PR #135)
+- Add `CHANGELOG.md` entries, format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) (PR #141)
+
+## [2.0.7](https://github.com/Ousret/charset_normalizer/compare/2.0.6...2.0.7) (2021-10-11)
+### Added
+- Add support for Kazakh (Cyrillic) language detection (PR #109)
+
+### Changed
+- Further improve inferring the language from a given single-byte code page (PR #112)
+- Vainly trying to leverage PEP263 when PEP3120 is not supported (PR #116)
+- Refactoring for potential performance improvements in loops from [@adbar](https://github.com/adbar) (PR #113)
+- Various detection improvement (MD+CD) (PR #117)
+
+### Removed
+- Remove redundant logging entry about detected language(s) (PR #115)
+
+### Fixed
+- Fix a minor inconsistency between Python 3.5 and other versions regarding language detection (PR #117 #102)
+
+## [2.0.6](https://github.com/Ousret/charset_normalizer/compare/2.0.5...2.0.6) (2021-09-18)
+### Fixed
+- Unforeseen regression with the loss of backward compatibility with some older minor versions of Python 3.5.x (PR #100)
+- Fix CLI crash when using --minimal output in certain cases (PR #103)
+
+### Changed
+- Minor improvement to the detection efficiency (less than 1%) (PR #106 #101)
+
+## [2.0.5](https://github.com/Ousret/charset_normalizer/compare/2.0.4...2.0.5) (2021-09-14)
+### Changed
+- The project now complies with flake8, mypy, isort and black to ensure better overall quality (PR #81)
+- The BC-support with v1.x was improved, the old staticmethods are restored (PR #82)
+- The Unicode detection is slightly improved (PR #93)
+- Add syntax sugar \_\_bool\_\_ for results CharsetMatches list-container (PR #91)
+
+### Removed
+- The project no longer raises a warning on tiny content given for detection; it is simply logged as a warning instead (PR #92)
+
+### Fixed
+- In some rare cases, the chunk extractor could cut in the middle of a multi-byte character and mislead the mess detection (PR #95)
+- Some rare 'space' characters could trip up the UnprintablePlugin/Mess detection (PR #96)
+- The MANIFEST.in was not exhaustive (PR #78)
+
+## [2.0.4](https://github.com/Ousret/charset_normalizer/compare/2.0.3...2.0.4) (2021-07-30)
+### Fixed
+- The CLI no longer raises an unexpected exception when no encoding has been found (PR #70)
+- Fix accessing the 'alphabets' property when the payload contains surrogate characters (PR #68)
+- The logger could mislead (explain=True) on detected languages and the impact of one MBCS match (PR #72)
+- Submatch factoring could be wrong in rare edge cases (PR #72)
+- Multiple files given to the CLI were ignored when publishing results to STDOUT. (After the first path) (PR #72)
+- Fix line endings from CRLF to LF for certain project files (PR #67)
+
+### Changed
+- Adjust the MD to lower the sensitivity, thus improving the global detection reliability (PR #69 #76)
+- Allow fallback on specified encoding if any (PR #71)
+
+## [2.0.3](https://github.com/Ousret/charset_normalizer/compare/2.0.2...2.0.3) (2021-07-16)
+### Changed
+- Part of the detection mechanism has been improved to be less sensitive, resulting in more accurate detection results. Especially ASCII. (PR #63)
+- According to the community wishes, the detection will fall back on ASCII or UTF-8 in a last-resort case. (PR #64)
+
+## [2.0.2](https://github.com/Ousret/charset_normalizer/compare/2.0.1...2.0.2) (2021-07-15)
+### Fixed
+- Empty/too-small JSON payload mis-detection fixed. Report from [@tseaver](https://github.com/tseaver) (PR #59)
+
+### Changed
+- Don't inject unicodedata2 into sys.modules from [@akx](https://github.com/akx) (PR #57)
+
+## [2.0.1](https://github.com/Ousret/charset_normalizer/compare/2.0.0...2.0.1) (2021-07-13)
+### Fixed
+- Make it work where there isn't a filesystem available, dropping assets frequencies.json. Report from [@sethmlarson](https://github.com/sethmlarson). (PR #55)
+- Using explain=False permanently disabled the verbose output in the current runtime (PR #47)
+- One log entry (language target preemptive) was not shown in logs when using explain=True (PR #47)
+- Fix undesired exception (ValueError) on getitem of instance CharsetMatches (PR #52)
+
+### Changed
+- Public function normalize default args values were not aligned with from_bytes (PR #53)
+
+### Added
+- You may now use charset aliases in cp_isolation and cp_exclusion arguments (PR #47)
+
+## [2.0.0](https://github.com/Ousret/charset_normalizer/compare/1.4.1...2.0.0) (2021-07-02)
+### Changed
+- 4x to 5 times faster than the previous 1.4.0 release. At least 2x faster than Chardet.
+- Emphasis has been put on UTF-8 detection; it should perform near-instantaneously.
+- The backward compatibility with Chardet has been greatly improved. The legacy detect function returns an identical charset name whenever possible.
+- The detection mechanism has been slightly improved, now Turkish content is detected correctly (most of the time)
+- The program has been rewritten to ease readability and maintainability (+ using static typing).
+- utf_7 detection has been reinstated.
+
+### Removed
+- This package no longer requires anything when used with Python 3.5 (dropped cached_property)
+- Removed support for these languages: Catalan, Esperanto, Kazakh, Basque, Volapük, Azeri, Galician, Nynorsk, Macedonian, and Serbo-Croatian.
+- The exception hook on UnicodeDecodeError has been removed.
+
+### Deprecated
+- Methods coherence_non_latin, w_counter, chaos_secondary_pass of the class CharsetMatch are now deprecated and scheduled for removal in v3.0
+
+### Fixed
+- The CLI output used the relative path of the file(s); it should be absolute.
+
+## [1.4.1](https://github.com/Ousret/charset_normalizer/compare/1.4.0...1.4.1) (2021-05-28)
+### Fixed
+- Logger configuration/usage no longer conflict with others (PR #44)
+
+## [1.4.0](https://github.com/Ousret/charset_normalizer/compare/1.3.9...1.4.0) (2021-05-21)
+### Removed
+- Using standard logging instead of using the package loguru.
+- Dropping nose test framework in favor of the maintained pytest.
+- Choose to not use dragonmapper package to help with gibberish Chinese/CJK text.
+- Require cached_property only for Python 3.5 due to constraint. Dropping for every other interpreter version.
+- Stop support for UTF-7 that does not contain a SIG.
+- Dropping PrettyTable, replaced with pure JSON output in CLI.
+
+### Fixed
+- The BOM marker in a CharsetNormalizerMatch instance could be False in rare cases even if obviously present, due to the sub-match factoring process.
+- Not searching properly for the BOM when trying utf32/16 parent codec.
+
+### Changed
+- Improving the package final size by compressing frequencies.json.
+- Huge improvement on the largest payloads.
+
+### Added
+- CLI now produces JSON consumable output.
+- Return ASCII if the given sequences fit, with reasonable confidence.
+
+## [1.3.9](https://github.com/Ousret/charset_normalizer/compare/1.3.8...1.3.9) (2021-05-13)
+
+### Fixed
+- In some very rare cases, you may end up getting encode/decode errors due to a bad bytes payload (PR #40)
+
+## [1.3.8](https://github.com/Ousret/charset_normalizer/compare/1.3.7...1.3.8) (2021-05-12)
+
+### Fixed
+- Empty given payload for detection may cause an exception if trying to access the `alphabets` property. (PR #39)
+
+## [1.3.7](https://github.com/Ousret/charset_normalizer/compare/1.3.6...1.3.7) (2021-05-12)
+
+### Fixed
+- The legacy detect function should return UTF-8-SIG if sig is present in the payload. (PR #38)
+
+## [1.3.6](https://github.com/Ousret/charset_normalizer/compare/1.3.5...1.3.6) (2021-02-09)
+
+### Changed
+- Amend the previous release to allow prettytable 2.0 (PR #35)
+
+## [1.3.5](https://github.com/Ousret/charset_normalizer/compare/1.3.4...1.3.5) (2021-02-08)
+
+### Fixed
+- Fix error while using the package with a python pre-release interpreter (PR #33)
+
+### Changed
+- Dependencies refactoring, constraints revised.
+
+### Added
+- Add python 3.9 and 3.10 to the supported interpreters
+
+MIT License
+
+Copyright (c) 2019 TAHRI Ahmed R.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/RECORD
new file mode 100644
index 0000000..9b31b27
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/RECORD
@@ -0,0 +1,36 @@
+../../bin/normalizer,sha256=d64Y2GlBYzj4fRL5WK1WS-VHgezegWBH89IcZpevMig,242
+charset_normalizer-3.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+charset_normalizer-3.4.0.dist-info/LICENSE,sha256=6zGgxaT7Cbik4yBV0lweX5w1iidS_vPNcgIT0cz-4kE,1070
+charset_normalizer-3.4.0.dist-info/METADATA,sha256=WGbEW9ehh2spNJxo1M6sEGGZWmsQ-oj2DsMjV29zoms,34159
+charset_normalizer-3.4.0.dist-info/RECORD,,
+charset_normalizer-3.4.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+charset_normalizer-3.4.0.dist-info/WHEEL,sha256=XihS4yPLFu_eB7R4sl7jUHiEAA7zQ3q0-_CuIzkpFkk,151
+charset_normalizer-3.4.0.dist-info/entry_points.txt,sha256=ADSTKrkXZ3hhdOVFi6DcUEHQRS0xfxDIE_pEz4wLIXA,65
+charset_normalizer-3.4.0.dist-info/top_level.txt,sha256=7ASyzePr8_xuZWJsnqJjIBtyV8vhEo0wBCv1MPRRi3Q,19
+charset_normalizer/__init__.py,sha256=UzI3xC8PhmcLRMzSgPb6minTmRq0kWznnCBJ8ZCc2XI,1577
+charset_normalizer/__main__.py,sha256=JxY8bleaENOFlLRb9HfoeZCzAMnn2A1oGR5Xm2eyqg0,73
+charset_normalizer/__pycache__/__init__.cpython-311.pyc,,
+charset_normalizer/__pycache__/__main__.cpython-311.pyc,,
+charset_normalizer/__pycache__/api.cpython-311.pyc,,
+charset_normalizer/__pycache__/cd.cpython-311.pyc,,
+charset_normalizer/__pycache__/constant.cpython-311.pyc,,
+charset_normalizer/__pycache__/legacy.cpython-311.pyc,,
+charset_normalizer/__pycache__/md.cpython-311.pyc,,
+charset_normalizer/__pycache__/models.cpython-311.pyc,,
+charset_normalizer/__pycache__/utils.cpython-311.pyc,,
+charset_normalizer/__pycache__/version.cpython-311.pyc,,
+charset_normalizer/api.py,sha256=kMyNUqrfBZU22PP0pYKrSldtYUGA24wsGlXGLAKra7c,22559
+charset_normalizer/cd.py,sha256=xwZliZcTQFA3jU0c00PRiu9MNxXTFxQkFLWmMW24ZzI,12560
+charset_normalizer/cli/__init__.py,sha256=D5ERp8P62llm2FuoMzydZ7d9rs8cvvLXqE-1_6oViPc,100
+charset_normalizer/cli/__main__.py,sha256=zX9sV_ApU1d96Wb0cS04vulstdB4F0Eh7kLn-gevfw4,10411
+charset_normalizer/cli/__pycache__/__init__.cpython-311.pyc,,
+charset_normalizer/cli/__pycache__/__main__.cpython-311.pyc,,
+charset_normalizer/constant.py,sha256=uwoW87NicWZDTLviX7le0wdoYBbhBQDA4n1JtJo77ts,40499
+charset_normalizer/legacy.py,sha256=XJjkT0hejMH8qfAKz1ts8OUiBT18t2FJP3tJgLwUWwc,2327
+charset_normalizer/md.cpython-311-x86_64-linux-gnu.so,sha256=Y7QSLD5QLoSFAWys0-tL7R6QB7oi5864zM6zr7RWek4,16064
+charset_normalizer/md.py,sha256=SIIZcENrslI7h3v4GigbFN61fRyE_wiCN1z9Ii3fBRo,20138
+charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so,sha256=xDjCrj9MzdH8kW7d-HbtvIaOcrX6SFiV7SrBv4QgGEI,272696
+charset_normalizer/models.py,sha256=oAMAcBSEY7CngbUXJp34Wc4Rl9NKJJjGmUwW3EPtk6g,12425
+charset_normalizer/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+charset_normalizer/utils.py,sha256=teiosMqzKjXyAHXnGdjSBOgnBZwx-SkBbCLrx0UXy8M,11894
+charset_normalizer/version.py,sha256=AX66S4ytQFdd6F5jbVU2OPMqYwFS5M3BkMvyX-3BKF8,79
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/WHEEL
new file mode 100644
index 0000000..d9c3682
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: setuptools (75.1.0)
+Root-Is-Purelib: false
+Tag: cp311-cp311-manylinux_2_17_x86_64
+Tag: cp311-cp311-manylinux2014_x86_64
+
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/entry_points.txt b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/entry_points.txt
new file mode 100644
index 0000000..65619e7
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/entry_points.txt
@@ -0,0 +1,2 @@
+[console_scripts]
+normalizer = charset_normalizer.cli:cli_detect
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/top_level.txt b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/top_level.txt
new file mode 100644
index 0000000..66958f0
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer-3.4.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+charset_normalizer
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/__init__.py
new file mode 100644
index 0000000..55991fc
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/__init__.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+"""
+Charset-Normalizer
+~~~~~~~~~~~~~~
+The Real First Universal Charset Detector.
+A library that helps you read text from an unknown charset encoding.
+Motivated by chardet, this package is trying to resolve the issue by taking a new approach.
+All IANA character set names for which the Python core library provides codecs are supported.
+
+Basic usage:
+ >>> from charset_normalizer import from_bytes
+ >>> results = from_bytes('Bсеки човек има право на образование. Oбразованието!'.encode('utf_8'))
+ >>> best_guess = results.best()
+ >>> str(best_guess)
+ 'Bсеки човек има право на образование. Oбразованието!'
+
+Other methods and usages are available - see the full documentation
+at <https://charset-normalizer.readthedocs.io/en/latest/>.
+:copyright: (c) 2021 by Ahmed TAHRI
+:license: MIT, see LICENSE for more details.
+"""
+import logging
+
+from .api import from_bytes, from_fp, from_path, is_binary
+from .legacy import detect
+from .models import CharsetMatch, CharsetMatches
+from .utils import set_logging_handler
+from .version import VERSION, __version__
+
+__all__ = (
+ "from_fp",
+ "from_path",
+ "from_bytes",
+ "is_binary",
+ "detect",
+ "CharsetMatch",
+ "CharsetMatches",
+ "__version__",
+ "VERSION",
+ "set_logging_handler",
+)
+
+# Attach a NullHandler to the top level logger by default
+# https://docs.python.org/3.3/howto/logging.html#configuring-logging-for-a-library
+
+logging.getLogger("charset_normalizer").addHandler(logging.NullHandler())
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/__main__.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/__main__.py
new file mode 100644
index 0000000..beae2ef
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/__main__.py
@@ -0,0 +1,4 @@
+from .cli import cli_detect
+
+if __name__ == "__main__":
+ cli_detect()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/api.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/api.py
new file mode 100644
index 0000000..e3f2283
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/api.py
@@ -0,0 +1,668 @@
+import logging
+from os import PathLike
+from typing import BinaryIO, List, Optional, Set, Union
+
+from .cd import (
+ coherence_ratio,
+ encoding_languages,
+ mb_encoding_languages,
+ merge_coherence_ratios,
+)
+from .constant import IANA_SUPPORTED, TOO_BIG_SEQUENCE, TOO_SMALL_SEQUENCE, TRACE
+from .md import mess_ratio
+from .models import CharsetMatch, CharsetMatches
+from .utils import (
+ any_specified_encoding,
+ cut_sequence_chunks,
+ iana_name,
+ identify_sig_or_bom,
+ is_cp_similar,
+ is_multi_byte_encoding,
+ should_strip_sig_or_bom,
+)
+
+# Will most likely be controversial
+# logging.addLevelName(TRACE, "TRACE")
+logger = logging.getLogger("charset_normalizer")
+explain_handler = logging.StreamHandler()
+explain_handler.setFormatter(
+ logging.Formatter("%(asctime)s | %(levelname)s | %(message)s")
+)
+
+
+def from_bytes(
+ sequences: Union[bytes, bytearray],
+ steps: int = 5,
+ chunk_size: int = 512,
+ threshold: float = 0.2,
+ cp_isolation: Optional[List[str]] = None,
+ cp_exclusion: Optional[List[str]] = None,
+ preemptive_behaviour: bool = True,
+ explain: bool = False,
+ language_threshold: float = 0.1,
+ enable_fallback: bool = True,
+) -> CharsetMatches:
+ """
+ Given a raw bytes sequence, return the best possibles charset usable to render str objects.
+ If there is no results, it is a strong indicator that the source is binary/not text.
+ By default, the process will extract 5 blocks of 512o each to assess the mess and coherence of a given sequence.
+ And will give up a particular code page after 20% of measured mess. Those criteria are customizable at will.
+
+ The preemptive behavior DOES NOT replace the traditional detection workflow, it prioritize a particular code page
+ but never take it for granted. Can improve the performance.
+
+ You may want to focus your attention to some code page or/and not others, use cp_isolation and cp_exclusion for that
+ purpose.
+
+ This function will strip the SIG in the payload/sequence every time except on UTF-16, UTF-32.
+ By default the library does not setup any handler other than the NullHandler, if you choose to set the 'explain'
+ toggle to True it will alter the logger configuration to add a StreamHandler that is suitable for debugging.
+ Custom logging format and handler can be set manually.
+ """
+
+ if not isinstance(sequences, (bytearray, bytes)):
+ raise TypeError(
+ "Expected object of type bytes or bytearray, got: {0}".format(
+ type(sequences)
+ )
+ )
+
+ if explain:
+ previous_logger_level: int = logger.level
+ logger.addHandler(explain_handler)
+ logger.setLevel(TRACE)
+
+ length: int = len(sequences)
+
+ if length == 0:
+ logger.debug("Encoding detection on empty bytes, assuming utf_8 intention.")
+ if explain:
+ logger.removeHandler(explain_handler)
+ logger.setLevel(previous_logger_level or logging.WARNING)
+ return CharsetMatches([CharsetMatch(sequences, "utf_8", 0.0, False, [], "")])
+
+ if cp_isolation is not None:
+ logger.log(
+ TRACE,
+ "cp_isolation is set. use this flag for debugging purpose. "
+ "limited list of encoding allowed : %s.",
+ ", ".join(cp_isolation),
+ )
+ cp_isolation = [iana_name(cp, False) for cp in cp_isolation]
+ else:
+ cp_isolation = []
+
+ if cp_exclusion is not None:
+ logger.log(
+ TRACE,
+ "cp_exclusion is set. use this flag for debugging purpose. "
+ "limited list of encoding excluded : %s.",
+ ", ".join(cp_exclusion),
+ )
+ cp_exclusion = [iana_name(cp, False) for cp in cp_exclusion]
+ else:
+ cp_exclusion = []
+
+ if length <= (chunk_size * steps):
+ logger.log(
+ TRACE,
+ "override steps (%i) and chunk_size (%i) as content does not fit (%i byte(s) given) parameters.",
+ steps,
+ chunk_size,
+ length,
+ )
+ steps = 1
+ chunk_size = length
+
+ if steps > 1 and length / steps < chunk_size:
+ chunk_size = int(length / steps)
+
+ is_too_small_sequence: bool = len(sequences) < TOO_SMALL_SEQUENCE
+ is_too_large_sequence: bool = len(sequences) >= TOO_BIG_SEQUENCE
+
+ if is_too_small_sequence:
+ logger.log(
+ TRACE,
+ "Trying to detect encoding from a tiny portion of ({}) byte(s).".format(
+ length
+ ),
+ )
+ elif is_too_large_sequence:
+ logger.log(
+ TRACE,
+ "Using lazy str decoding because the payload is quite large, ({}) byte(s).".format(
+ length
+ ),
+ )
+
+ prioritized_encodings: List[str] = []
+
+ specified_encoding: Optional[str] = (
+ any_specified_encoding(sequences) if preemptive_behaviour else None
+ )
+
+ if specified_encoding is not None:
+ prioritized_encodings.append(specified_encoding)
+ logger.log(
+ TRACE,
+ "Detected declarative mark in sequence. Priority +1 given for %s.",
+ specified_encoding,
+ )
+
+ tested: Set[str] = set()
+ tested_but_hard_failure: List[str] = []
+ tested_but_soft_failure: List[str] = []
+
+ fallback_ascii: Optional[CharsetMatch] = None
+ fallback_u8: Optional[CharsetMatch] = None
+ fallback_specified: Optional[CharsetMatch] = None
+
+ results: CharsetMatches = CharsetMatches()
+
+ early_stop_results: CharsetMatches = CharsetMatches()
+
+ sig_encoding, sig_payload = identify_sig_or_bom(sequences)
+
+ if sig_encoding is not None:
+ prioritized_encodings.append(sig_encoding)
+ logger.log(
+ TRACE,
+ "Detected a SIG or BOM mark on first %i byte(s). Priority +1 given for %s.",
+ len(sig_payload),
+ sig_encoding,
+ )
+
+ prioritized_encodings.append("ascii")
+
+ if "utf_8" not in prioritized_encodings:
+ prioritized_encodings.append("utf_8")
+
+ for encoding_iana in prioritized_encodings + IANA_SUPPORTED:
+ if cp_isolation and encoding_iana not in cp_isolation:
+ continue
+
+ if cp_exclusion and encoding_iana in cp_exclusion:
+ continue
+
+ if encoding_iana in tested:
+ continue
+
+ tested.add(encoding_iana)
+
+ decoded_payload: Optional[str] = None
+ bom_or_sig_available: bool = sig_encoding == encoding_iana
+ strip_sig_or_bom: bool = bom_or_sig_available and should_strip_sig_or_bom(
+ encoding_iana
+ )
+
+ if encoding_iana in {"utf_16", "utf_32"} and not bom_or_sig_available:
+ logger.log(
+ TRACE,
+ "Encoding %s won't be tested as-is because it require a BOM. Will try some sub-encoder LE/BE.",
+ encoding_iana,
+ )
+ continue
+ if encoding_iana in {"utf_7"} and not bom_or_sig_available:
+ logger.log(
+ TRACE,
+ "Encoding %s won't be tested as-is because detection is unreliable without BOM/SIG.",
+ encoding_iana,
+ )
+ continue
+
+ try:
+ is_multi_byte_decoder: bool = is_multi_byte_encoding(encoding_iana)
+ except (ModuleNotFoundError, ImportError):
+ logger.log(
+ TRACE,
+ "Encoding %s does not provide an IncrementalDecoder",
+ encoding_iana,
+ )
+ continue
+
+ try:
+ if is_too_large_sequence and is_multi_byte_decoder is False:
+ str(
+ (
+ sequences[: int(50e4)]
+ if strip_sig_or_bom is False
+ else sequences[len(sig_payload) : int(50e4)]
+ ),
+ encoding=encoding_iana,
+ )
+ else:
+ decoded_payload = str(
+ (
+ sequences
+ if strip_sig_or_bom is False
+ else sequences[len(sig_payload) :]
+ ),
+ encoding=encoding_iana,
+ )
+ except (UnicodeDecodeError, LookupError) as e:
+ if not isinstance(e, LookupError):
+ logger.log(
+ TRACE,
+ "Code page %s does not fit given bytes sequence at ALL. %s",
+ encoding_iana,
+ str(e),
+ )
+ tested_but_hard_failure.append(encoding_iana)
+ continue
+
+ similar_soft_failure_test: bool = False
+
+ for encoding_soft_failed in tested_but_soft_failure:
+ if is_cp_similar(encoding_iana, encoding_soft_failed):
+ similar_soft_failure_test = True
+ break
+
+ if similar_soft_failure_test:
+ logger.log(
+ TRACE,
+ "%s is deemed too similar to code page %s and was consider unsuited already. Continuing!",
+ encoding_iana,
+ encoding_soft_failed,
+ )
+ continue
+
+ r_ = range(
+ 0 if not bom_or_sig_available else len(sig_payload),
+ length,
+ int(length / steps),
+ )
+
+ multi_byte_bonus: bool = (
+ is_multi_byte_decoder
+ and decoded_payload is not None
+ and len(decoded_payload) < length
+ )
+
+ if multi_byte_bonus:
+ logger.log(
+ TRACE,
+ "Code page %s is a multi byte encoding table and it appear that at least one character "
+ "was encoded using n-bytes.",
+ encoding_iana,
+ )
+
+ max_chunk_gave_up: int = int(len(r_) / 4)
+
+ max_chunk_gave_up = max(max_chunk_gave_up, 2)
+ early_stop_count: int = 0
+ lazy_str_hard_failure = False
+
+ md_chunks: List[str] = []
+ md_ratios = []
+
+ try:
+ for chunk in cut_sequence_chunks(
+ sequences,
+ encoding_iana,
+ r_,
+ chunk_size,
+ bom_or_sig_available,
+ strip_sig_or_bom,
+ sig_payload,
+ is_multi_byte_decoder,
+ decoded_payload,
+ ):
+ md_chunks.append(chunk)
+
+ md_ratios.append(
+ mess_ratio(
+ chunk,
+ threshold,
+ explain is True and 1 <= len(cp_isolation) <= 2,
+ )
+ )
+
+ if md_ratios[-1] >= threshold:
+ early_stop_count += 1
+
+ if (early_stop_count >= max_chunk_gave_up) or (
+ bom_or_sig_available and strip_sig_or_bom is False
+ ):
+ break
+ except (
+ UnicodeDecodeError
+ ) as e: # Lazy str loading may have missed something there
+ logger.log(
+ TRACE,
+ "LazyStr Loading: After MD chunk decode, code page %s does not fit given bytes sequence at ALL. %s",
+ encoding_iana,
+ str(e),
+ )
+ early_stop_count = max_chunk_gave_up
+ lazy_str_hard_failure = True
+
+        # We might want to check the sequence again with the whole content
+        # Only if the initial MD tests pass
+ if (
+ not lazy_str_hard_failure
+ and is_too_large_sequence
+ and not is_multi_byte_decoder
+ ):
+ try:
+ sequences[int(50e3) :].decode(encoding_iana, errors="strict")
+ except UnicodeDecodeError as e:
+ logger.log(
+ TRACE,
+ "LazyStr Loading: After final lookup, code page %s does not fit given bytes sequence at ALL. %s",
+ encoding_iana,
+ str(e),
+ )
+ tested_but_hard_failure.append(encoding_iana)
+ continue
+
+ mean_mess_ratio: float = sum(md_ratios) / len(md_ratios) if md_ratios else 0.0
+ if mean_mess_ratio >= threshold or early_stop_count >= max_chunk_gave_up:
+ tested_but_soft_failure.append(encoding_iana)
+ logger.log(
+ TRACE,
+ "%s was excluded because of initial chaos probing. Gave up %i time(s). "
+ "Computed mean chaos is %f %%.",
+ encoding_iana,
+ early_stop_count,
+ round(mean_mess_ratio * 100, ndigits=3),
+ )
+ # Preparing those fallbacks in case we got nothing.
+ if (
+ enable_fallback
+ and encoding_iana in ["ascii", "utf_8", specified_encoding]
+ and not lazy_str_hard_failure
+ ):
+ fallback_entry = CharsetMatch(
+ sequences,
+ encoding_iana,
+ threshold,
+ False,
+ [],
+ decoded_payload,
+ preemptive_declaration=specified_encoding,
+ )
+ if encoding_iana == specified_encoding:
+ fallback_specified = fallback_entry
+ elif encoding_iana == "ascii":
+ fallback_ascii = fallback_entry
+ else:
+ fallback_u8 = fallback_entry
+ continue
+
+ logger.log(
+ TRACE,
+ "%s passed initial chaos probing. Mean measured chaos is %f %%",
+ encoding_iana,
+ round(mean_mess_ratio * 100, ndigits=3),
+ )
+
+ if not is_multi_byte_decoder:
+ target_languages: List[str] = encoding_languages(encoding_iana)
+ else:
+ target_languages = mb_encoding_languages(encoding_iana)
+
+ if target_languages:
+ logger.log(
+ TRACE,
+ "{} should target any language(s) of {}".format(
+ encoding_iana, str(target_languages)
+ ),
+ )
+
+ cd_ratios = []
+
+        # We shall skip the coherence detection (CD) when it's about ASCII.
+        # Most of the time it's not relevant to run "language-detection" on it.
+ if encoding_iana != "ascii":
+ for chunk in md_chunks:
+ chunk_languages = coherence_ratio(
+ chunk,
+ language_threshold,
+ ",".join(target_languages) if target_languages else None,
+ )
+
+ cd_ratios.append(chunk_languages)
+
+ cd_ratios_merged = merge_coherence_ratios(cd_ratios)
+
+ if cd_ratios_merged:
+ logger.log(
+ TRACE,
+ "We detected language {} using {}".format(
+ cd_ratios_merged, encoding_iana
+ ),
+ )
+
+ current_match = CharsetMatch(
+ sequences,
+ encoding_iana,
+ mean_mess_ratio,
+ bom_or_sig_available,
+ cd_ratios_merged,
+ (
+ decoded_payload
+ if (
+ is_too_large_sequence is False
+ or encoding_iana in [specified_encoding, "ascii", "utf_8"]
+ )
+ else None
+ ),
+ preemptive_declaration=specified_encoding,
+ )
+
+ results.append(current_match)
+
+ if (
+ encoding_iana in [specified_encoding, "ascii", "utf_8"]
+ and mean_mess_ratio < 0.1
+ ):
+ # If md says nothing to worry about, then... stop immediately!
+ if mean_mess_ratio == 0.0:
+ logger.debug(
+ "Encoding detection: %s is most likely the one.",
+ current_match.encoding,
+ )
+ if explain:
+ logger.removeHandler(explain_handler)
+ logger.setLevel(previous_logger_level)
+ return CharsetMatches([current_match])
+
+ early_stop_results.append(current_match)
+
+ if (
+ len(early_stop_results)
+ and (specified_encoding is None or specified_encoding in tested)
+ and "ascii" in tested
+ and "utf_8" in tested
+ ):
+ probable_result: CharsetMatch = early_stop_results.best() # type: ignore[assignment]
+ logger.debug(
+ "Encoding detection: %s is most likely the one.",
+ probable_result.encoding,
+ )
+ if explain:
+ logger.removeHandler(explain_handler)
+ logger.setLevel(previous_logger_level)
+
+ return CharsetMatches([probable_result])
+
+ if encoding_iana == sig_encoding:
+ logger.debug(
+ "Encoding detection: %s is most likely the one as we detected a BOM or SIG within "
+ "the beginning of the sequence.",
+ encoding_iana,
+ )
+ if explain:
+ logger.removeHandler(explain_handler)
+ logger.setLevel(previous_logger_level)
+ return CharsetMatches([results[encoding_iana]])
+
+ if len(results) == 0:
+ if fallback_u8 or fallback_ascii or fallback_specified:
+ logger.log(
+ TRACE,
+ "Nothing got out of the detection process. Using ASCII/UTF-8/Specified fallback.",
+ )
+
+ if fallback_specified:
+ logger.debug(
+ "Encoding detection: %s will be used as a fallback match",
+ fallback_specified.encoding,
+ )
+ results.append(fallback_specified)
+ elif (
+ (fallback_u8 and fallback_ascii is None)
+ or (
+ fallback_u8
+ and fallback_ascii
+ and fallback_u8.fingerprint != fallback_ascii.fingerprint
+ )
+ or (fallback_u8 is not None)
+ ):
+ logger.debug("Encoding detection: utf_8 will be used as a fallback match")
+ results.append(fallback_u8)
+ elif fallback_ascii:
+ logger.debug("Encoding detection: ascii will be used as a fallback match")
+ results.append(fallback_ascii)
+
+ if results:
+ logger.debug(
+ "Encoding detection: Found %s as plausible (best-candidate) for content. With %i alternatives.",
+ results.best().encoding, # type: ignore
+ len(results) - 1,
+ )
+ else:
+ logger.debug("Encoding detection: Unable to determine any suitable charset.")
+
+ if explain:
+ logger.removeHandler(explain_handler)
+ logger.setLevel(previous_logger_level)
+
+ return results
+
+
+def from_fp(
+ fp: BinaryIO,
+ steps: int = 5,
+ chunk_size: int = 512,
+ threshold: float = 0.20,
+ cp_isolation: Optional[List[str]] = None,
+ cp_exclusion: Optional[List[str]] = None,
+ preemptive_behaviour: bool = True,
+ explain: bool = False,
+ language_threshold: float = 0.1,
+ enable_fallback: bool = True,
+) -> CharsetMatches:
+ """
+    Same as the function from_bytes but using a file pointer that is already opened and ready.
+ Will not close the file pointer.
+ """
+ return from_bytes(
+ fp.read(),
+ steps,
+ chunk_size,
+ threshold,
+ cp_isolation,
+ cp_exclusion,
+ preemptive_behaviour,
+ explain,
+ language_threshold,
+ enable_fallback,
+ )
+
+
+def from_path(
+ path: Union[str, bytes, PathLike], # type: ignore[type-arg]
+ steps: int = 5,
+ chunk_size: int = 512,
+ threshold: float = 0.20,
+ cp_isolation: Optional[List[str]] = None,
+ cp_exclusion: Optional[List[str]] = None,
+ preemptive_behaviour: bool = True,
+ explain: bool = False,
+ language_threshold: float = 0.1,
+ enable_fallback: bool = True,
+) -> CharsetMatches:
+ """
+    Same as the function from_bytes but with one extra step: opening and reading the given file path in binary mode.
+ Can raise IOError.
+ """
+ with open(path, "rb") as fp:
+ return from_fp(
+ fp,
+ steps,
+ chunk_size,
+ threshold,
+ cp_isolation,
+ cp_exclusion,
+ preemptive_behaviour,
+ explain,
+ language_threshold,
+ enable_fallback,
+ )
+
+
+def is_binary(
+ fp_or_path_or_payload: Union[PathLike, str, BinaryIO, bytes], # type: ignore[type-arg]
+ steps: int = 5,
+ chunk_size: int = 512,
+ threshold: float = 0.20,
+ cp_isolation: Optional[List[str]] = None,
+ cp_exclusion: Optional[List[str]] = None,
+ preemptive_behaviour: bool = True,
+ explain: bool = False,
+ language_threshold: float = 0.1,
+ enable_fallback: bool = False,
+) -> bool:
+ """
+    Detect if the given input (file, bytes, or path) points to binary content, i.e. not a string.
+    Based on the same main heuristic algorithms and default kwargs, with the sole exception that fallback matches
+    are disabled, making it stricter on content that is ASCII-compatible but unlikely to be a string.
+ """
+ if isinstance(fp_or_path_or_payload, (str, PathLike)):
+ guesses = from_path(
+ fp_or_path_or_payload,
+ steps=steps,
+ chunk_size=chunk_size,
+ threshold=threshold,
+ cp_isolation=cp_isolation,
+ cp_exclusion=cp_exclusion,
+ preemptive_behaviour=preemptive_behaviour,
+ explain=explain,
+ language_threshold=language_threshold,
+ enable_fallback=enable_fallback,
+ )
+ elif isinstance(
+ fp_or_path_or_payload,
+ (
+ bytes,
+ bytearray,
+ ),
+ ):
+ guesses = from_bytes(
+ fp_or_path_or_payload,
+ steps=steps,
+ chunk_size=chunk_size,
+ threshold=threshold,
+ cp_isolation=cp_isolation,
+ cp_exclusion=cp_exclusion,
+ preemptive_behaviour=preemptive_behaviour,
+ explain=explain,
+ language_threshold=language_threshold,
+ enable_fallback=enable_fallback,
+ )
+ else:
+ guesses = from_fp(
+ fp_or_path_or_payload,
+ steps=steps,
+ chunk_size=chunk_size,
+ threshold=threshold,
+ cp_isolation=cp_isolation,
+ cp_exclusion=cp_exclusion,
+ preemptive_behaviour=preemptive_behaviour,
+ explain=explain,
+ language_threshold=language_threshold,
+ enable_fallback=enable_fallback,
+ )
+
+ return not guesses
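+
+# A quick usage sketch of the helpers above (illustrative only; the payloads
+# are made up and the noted results are plausible outcomes, not guarantees):
+#
+#   >>> best = from_bytes("Bonjour, où êtes-vous ?".encode("utf_8")).best()
+#   >>> best.encoding if best else None   # plausibly 'utf_8'
+#   >>> is_binary(b"\x00\x01\x02\xff")    # plausibly True for raw bytes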
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cd.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cd.py
new file mode 100644
index 0000000..4ea6760
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cd.py
@@ -0,0 +1,395 @@
+import importlib
+from codecs import IncrementalDecoder
+from collections import Counter
+from functools import lru_cache
+from typing import Counter as TypeCounter, Dict, List, Optional, Tuple
+
+from .constant import (
+ FREQUENCIES,
+ KO_NAMES,
+ LANGUAGE_SUPPORTED_COUNT,
+ TOO_SMALL_SEQUENCE,
+ ZH_NAMES,
+)
+from .md import is_suspiciously_successive_range
+from .models import CoherenceMatches
+from .utils import (
+ is_accentuated,
+ is_latin,
+ is_multi_byte_encoding,
+ is_unicode_range_secondary,
+ unicode_range,
+)
+
+
+def encoding_unicode_range(iana_name: str) -> List[str]:
+ """
+    Return the unicode ranges associated with a single-byte code page.
+ """
+ if is_multi_byte_encoding(iana_name):
+ raise IOError("Function not supported on multi-byte code page")
+
+ decoder = importlib.import_module(
+ "encodings.{}".format(iana_name)
+ ).IncrementalDecoder
+
+ p: IncrementalDecoder = decoder(errors="ignore")
+ seen_ranges: Dict[str, int] = {}
+ character_count: int = 0
+
+ for i in range(0x40, 0xFF):
+ chunk: str = p.decode(bytes([i]))
+
+ if chunk:
+ character_range: Optional[str] = unicode_range(chunk)
+
+ if character_range is None:
+ continue
+
+ if is_unicode_range_secondary(character_range) is False:
+ if character_range not in seen_ranges:
+ seen_ranges[character_range] = 0
+ seen_ranges[character_range] += 1
+ character_count += 1
+
+ return sorted(
+ [
+ character_range
+ for character_range in seen_ranges
+ if seen_ranges[character_range] / character_count >= 0.15
+ ]
+ )
+
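+# Illustrative only: this walks bytes 0x40-0xFE through the codec and keeps
+# the ranges covering at least 15% of the decoded characters, so a Cyrillic
+# code page would be expected to report the "Cyrillic" range, e.g.
+#
+#   >>> encoding_unicode_range("cp1251")   # likely includes 'Cyrillic'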
+
+def unicode_range_languages(primary_range: str) -> List[str]:
+ """
+ Return inferred languages used with a unicode range.
+ """
+ languages: List[str] = []
+
+ for language, characters in FREQUENCIES.items():
+ for character in characters:
+ if unicode_range(character) == primary_range:
+ languages.append(language)
+ break
+
+ return languages
+
+
+@lru_cache()
+def encoding_languages(iana_name: str) -> List[str]:
+ """
+    Single-byte encoding language association. Some code pages are heavily linked to particular language(s).
+ This function does the correspondence.
+ """
+ unicode_ranges: List[str] = encoding_unicode_range(iana_name)
+ primary_range: Optional[str] = None
+
+ for specified_range in unicode_ranges:
+ if "Latin" not in specified_range:
+ primary_range = specified_range
+ break
+
+ if primary_range is None:
+ return ["Latin Based"]
+
+ return unicode_range_languages(primary_range)
+
+
+@lru_cache()
+def mb_encoding_languages(iana_name: str) -> List[str]:
+ """
+    Multi-byte encoding language association. Some code pages are heavily linked to particular language(s).
+ This function does the correspondence.
+ """
+ if (
+ iana_name.startswith("shift_")
+ or iana_name.startswith("iso2022_jp")
+ or iana_name.startswith("euc_j")
+ or iana_name == "cp932"
+ ):
+ return ["Japanese"]
+ if iana_name.startswith("gb") or iana_name in ZH_NAMES:
+ return ["Chinese"]
+ if iana_name.startswith("iso2022_kr") or iana_name in KO_NAMES:
+ return ["Korean"]
+
+ return []
+
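+# Illustrative, per the prefix checks above:
+#
+#   >>> mb_encoding_languages("shift_jis")
+#   ['Japanese']
+#   >>> mb_encoding_languages("gb18030")
+#   ['Chinese']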
+
+@lru_cache(maxsize=LANGUAGE_SUPPORTED_COUNT)
+def get_target_features(language: str) -> Tuple[bool, bool]:
+ """
+    Determine the main traits of a supported language: whether it contains accented characters and whether it is purely Latin.
+ """
+ target_have_accents: bool = False
+ target_pure_latin: bool = True
+
+ for character in FREQUENCIES[language]:
+ if not target_have_accents and is_accentuated(character):
+ target_have_accents = True
+ if target_pure_latin and is_latin(character) is False:
+ target_pure_latin = False
+
+ return target_have_accents, target_pure_latin
+
+
+def alphabet_languages(
+ characters: List[str], ignore_non_latin: bool = False
+) -> List[str]:
+ """
+    Return the languages associated with the given characters.
+ """
+ languages: List[Tuple[str, float]] = []
+
+ source_have_accents = any(is_accentuated(character) for character in characters)
+
+ for language, language_characters in FREQUENCIES.items():
+ target_have_accents, target_pure_latin = get_target_features(language)
+
+ if ignore_non_latin and target_pure_latin is False:
+ continue
+
+ if target_have_accents is False and source_have_accents:
+ continue
+
+ character_count: int = len(language_characters)
+
+ character_match_count: int = len(
+ [c for c in language_characters if c in characters]
+ )
+
+ ratio: float = character_match_count / character_count
+
+ if ratio >= 0.2:
+ languages.append((language, ratio))
+
+ languages = sorted(languages, key=lambda x: x[1], reverse=True)
+
+ return [compatible_language[0] for compatible_language in languages]
+
+
+def characters_popularity_compare(
+ language: str, ordered_characters: List[str]
+) -> float:
+ """
+    Determine if an ordered character list (by occurrence, from most frequent to rarest) matches a particular language.
+    The result is a ratio between 0. (absolutely no correspondence) and 1. (near perfect fit).
+    Beware that this function is not strict on the match in order to ease the detection. (Meaning a close match counts as 1.)
+ """
+ if language not in FREQUENCIES:
+ raise ValueError("{} not available".format(language))
+
+ character_approved_count: int = 0
+ FREQUENCIES_language_set = set(FREQUENCIES[language])
+
+ ordered_characters_count: int = len(ordered_characters)
+ target_language_characters_count: int = len(FREQUENCIES[language])
+
+ large_alphabet: bool = target_language_characters_count > 26
+
+ for character, character_rank in zip(
+ ordered_characters, range(0, ordered_characters_count)
+ ):
+ if character not in FREQUENCIES_language_set:
+ continue
+
+ character_rank_in_language: int = FREQUENCIES[language].index(character)
+ expected_projection_ratio: float = (
+ target_language_characters_count / ordered_characters_count
+ )
+ character_rank_projection: int = int(character_rank * expected_projection_ratio)
+
+ if (
+ large_alphabet is False
+ and abs(character_rank_projection - character_rank_in_language) > 4
+ ):
+ continue
+
+ if (
+ large_alphabet is True
+ and abs(character_rank_projection - character_rank_in_language)
+ < target_language_characters_count / 3
+ ):
+ character_approved_count += 1
+ continue
+
+ characters_before_source: List[str] = FREQUENCIES[language][
+ 0:character_rank_in_language
+ ]
+ characters_after_source: List[str] = FREQUENCIES[language][
+ character_rank_in_language:
+ ]
+ characters_before: List[str] = ordered_characters[0:character_rank]
+ characters_after: List[str] = ordered_characters[character_rank:]
+
+ before_match_count: int = len(
+ set(characters_before) & set(characters_before_source)
+ )
+
+ after_match_count: int = len(
+ set(characters_after) & set(characters_after_source)
+ )
+
+ if len(characters_before_source) == 0 and before_match_count <= 4:
+ character_approved_count += 1
+ continue
+
+ if len(characters_after_source) == 0 and after_match_count <= 4:
+ character_approved_count += 1
+ continue
+
+ if (
+ before_match_count / len(characters_before_source) >= 0.4
+ or after_match_count / len(characters_after_source) >= 0.4
+ ):
+ character_approved_count += 1
+ continue
+
+ return character_approved_count / len(ordered_characters)
+
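+# Illustrative worked example: feeding a language its own frequency ordering
+# projects every character exactly onto its expected rank, so every character
+# is approved and the ratio is exactly 1.0:
+#
+#   >>> characters_popularity_compare("English", FREQUENCIES["English"])
+#   1.0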
+
+def alpha_unicode_split(decoded_sequence: str) -> List[str]:
+ """
+ Given a decoded text sequence, return a list of str. Unicode range / alphabet separation.
+    E.g. a text containing English/Latin with a bit of Hebrew will return two items in the resulting list;
+    one containing the Latin letters and the other the Hebrew ones.
+ """
+ layers: Dict[str, str] = {}
+
+ for character in decoded_sequence:
+ if character.isalpha() is False:
+ continue
+
+ character_range: Optional[str] = unicode_range(character)
+
+ if character_range is None:
+ continue
+
+ layer_target_range: Optional[str] = None
+
+ for discovered_range in layers:
+ if (
+ is_suspiciously_successive_range(discovered_range, character_range)
+ is False
+ ):
+ layer_target_range = discovered_range
+ break
+
+ if layer_target_range is None:
+ layer_target_range = character_range
+
+ if layer_target_range not in layers:
+ layers[layer_target_range] = character.lower()
+ continue
+
+ layers[layer_target_range] += character.lower()
+
+ return list(layers.values())
+
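+# Illustrative, matching the docstring above: a Latin/Hebrew mix yields one
+# lowercase layer per alphabet (non-alphabetic characters are dropped):
+#
+#   >>> alpha_unicode_split("Hello שלום")
+#   ['hello', 'שלום']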
+
+def merge_coherence_ratios(results: List[CoherenceMatches]) -> CoherenceMatches:
+ """
+    This function merges results previously returned by the function coherence_ratio.
+ The return type is the same as coherence_ratio.
+ """
+ per_language_ratios: Dict[str, List[float]] = {}
+ for result in results:
+ for sub_result in result:
+ language, ratio = sub_result
+ if language not in per_language_ratios:
+ per_language_ratios[language] = [ratio]
+ continue
+ per_language_ratios[language].append(ratio)
+
+ merge = [
+ (
+ language,
+ round(
+ sum(per_language_ratios[language]) / len(per_language_ratios[language]),
+ 4,
+ ),
+ )
+ for language in per_language_ratios
+ ]
+
+ return sorted(merge, key=lambda x: x[1], reverse=True)
+
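+# Illustrative worked example: ratios for the same language are averaged
+# (rounded to 4 decimals) and the merged list is sorted by ratio, descending:
+#
+#   >>> merge_coherence_ratios([[("English", 0.8)], [("English", 0.6), ("French", 0.5)]])
+#   [('English', 0.7), ('French', 0.5)]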
+
+def filter_alt_coherence_matches(results: CoherenceMatches) -> CoherenceMatches:
+ """
+ We shall NOT return "English—" in CoherenceMatches because it is an alternative
+ of "English". This function only keeps the best match and remove the em-dash in it.
+ """
+ index_results: Dict[str, List[float]] = dict()
+
+ for result in results:
+ language, ratio = result
+ no_em_name: str = language.replace("—", "")
+
+ if no_em_name not in index_results:
+ index_results[no_em_name] = []
+
+ index_results[no_em_name].append(ratio)
+
+ if any(len(index_results[e]) > 1 for e in index_results):
+ filtered_results: CoherenceMatches = []
+
+ for language in index_results:
+ filtered_results.append((language, max(index_results[language])))
+
+ return filtered_results
+
+ return results
+
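+# Illustrative worked example: "English—" folds into "English", and only the
+# best ratio per folded name survives:
+#
+#   >>> filter_alt_coherence_matches([("English", 0.9), ("English—", 0.95)])
+#   [('English', 0.95)]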
+
+@lru_cache(maxsize=2048)
+def coherence_ratio(
+ decoded_sequence: str, threshold: float = 0.1, lg_inclusion: Optional[str] = None
+) -> CoherenceMatches:
+ """
+    Detect ANY language that can be identified in the given sequence. The sequence will be analysed by layers.
+ A layer = Character extraction by alphabets/ranges.
+ """
+
+ results: List[Tuple[str, float]] = []
+ ignore_non_latin: bool = False
+
+ sufficient_match_count: int = 0
+
+ lg_inclusion_list = lg_inclusion.split(",") if lg_inclusion is not None else []
+ if "Latin Based" in lg_inclusion_list:
+ ignore_non_latin = True
+ lg_inclusion_list.remove("Latin Based")
+
+ for layer in alpha_unicode_split(decoded_sequence):
+ sequence_frequencies: TypeCounter[str] = Counter(layer)
+ most_common = sequence_frequencies.most_common()
+
+ character_count: int = sum(o for c, o in most_common)
+
+ if character_count <= TOO_SMALL_SEQUENCE:
+ continue
+
+ popular_character_ordered: List[str] = [c for c, o in most_common]
+
+ for language in lg_inclusion_list or alphabet_languages(
+ popular_character_ordered, ignore_non_latin
+ ):
+ ratio: float = characters_popularity_compare(
+ language, popular_character_ordered
+ )
+
+ if ratio < threshold:
+ continue
+ elif ratio >= 0.8:
+ sufficient_match_count += 1
+
+ results.append((language, round(ratio, 4)))
+
+ if sufficient_match_count >= 3:
+ break
+
+ return sorted(
+ filter_alt_coherence_matches(results), key=lambda x: x[1], reverse=True
+ )
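+
+# Illustrative only: a monolingual string long enough to pass the
+# TOO_SMALL_SEQUENCE guard (layers of 32 characters or fewer are skipped)
+# would be expected to surface its language first, e.g.
+#
+#   >>> coherence_ratio("это довольно длинный пример текста на русском языке")
+#   # plausibly [('Russian', ...), ...]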
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cli/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cli/__init__.py
new file mode 100644
index 0000000..d95fedf
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cli/__init__.py
@@ -0,0 +1,6 @@
+from .__main__ import cli_detect, query_yes_no
+
+__all__ = (
+ "cli_detect",
+ "query_yes_no",
+)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cli/__main__.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cli/__main__.py
new file mode 100644
index 0000000..e7edd0f
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/cli/__main__.py
@@ -0,0 +1,320 @@
+import argparse
+import sys
+from json import dumps
+from os.path import abspath, basename, dirname, join, realpath
+from platform import python_version
+from typing import List, Optional
+from unicodedata import unidata_version
+
+import charset_normalizer.md as md_module
+from charset_normalizer import from_fp
+from charset_normalizer.models import CliDetectionResult
+from charset_normalizer.version import __version__
+
+
+def query_yes_no(question: str, default: str = "yes") -> bool:
+ """Ask a yes/no question via input() and return their answer.
+
+ "question" is a string that is presented to the user.
+ "default" is the presumed answer if the user just hits .
+ It must be "yes" (the default), "no" or None (meaning
+ an answer is required of the user).
+
+ The "answer" return value is True for "yes" or False for "no".
+
+ Credit goes to (c) https://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input
+ """
+ valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False}
+ if default is None:
+ prompt = " [y/n] "
+ elif default == "yes":
+ prompt = " [Y/n] "
+ elif default == "no":
+ prompt = " [y/N] "
+ else:
+ raise ValueError("invalid default answer: '%s'" % default)
+
+ while True:
+ sys.stdout.write(question + prompt)
+ choice = input().lower()
+ if default is not None and choice == "":
+ return valid[default]
+ elif choice in valid:
+ return valid[choice]
+ else:
+ sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n")
+
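+# Illustrative only ("file.txt" is a made-up name): with default="no" the
+# prompt reads " [y/N] " and hitting Enter returns False.
+#
+#   >>> query_yes_no('Overwrite "file.txt"?', "no")
+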
+
+def cli_detect(argv: Optional[List[str]] = None) -> int:
+ """
+ CLI assistant using ARGV and ArgumentParser
+ :param argv:
+    :return: 0 if everything went fine, anything else signals trouble
+ """
+ parser = argparse.ArgumentParser(
+ description="The Real First Universal Charset Detector. "
+ "Discover originating encoding used on text file. "
+ "Normalize text to unicode."
+ )
+
+ parser.add_argument(
+ "files", type=argparse.FileType("rb"), nargs="+", help="File(s) to be analysed"
+ )
+ parser.add_argument(
+ "-v",
+ "--verbose",
+ action="store_true",
+ default=False,
+ dest="verbose",
+ help="Display complementary information about file if any. "
+ "Stdout will contain logs about the detection process.",
+ )
+ parser.add_argument(
+ "-a",
+ "--with-alternative",
+ action="store_true",
+ default=False,
+ dest="alternatives",
+ help="Output complementary possibilities if any. Top-level JSON WILL be a list.",
+ )
+ parser.add_argument(
+ "-n",
+ "--normalize",
+ action="store_true",
+ default=False,
+ dest="normalize",
+ help="Permit to normalize input file. If not set, program does not write anything.",
+ )
+ parser.add_argument(
+ "-m",
+ "--minimal",
+ action="store_true",
+ default=False,
+ dest="minimal",
+ help="Only output the charset detected to STDOUT. Disabling JSON output.",
+ )
+ parser.add_argument(
+ "-r",
+ "--replace",
+ action="store_true",
+ default=False,
+ dest="replace",
+ help="Replace file when trying to normalize it instead of creating a new one.",
+ )
+ parser.add_argument(
+ "-f",
+ "--force",
+ action="store_true",
+ default=False,
+ dest="force",
+ help="Replace file without asking if you are sure, use this flag with caution.",
+ )
+ parser.add_argument(
+ "-i",
+ "--no-preemptive",
+ action="store_true",
+ default=False,
+ dest="no_preemptive",
+ help="Disable looking at a charset declaration to hint the detector.",
+ )
+ parser.add_argument(
+ "-t",
+ "--threshold",
+ action="store",
+ default=0.2,
+ type=float,
+ dest="threshold",
+ help="Define a custom maximum amount of chaos allowed in decoded content. 0. <= chaos <= 1.",
+ )
+ parser.add_argument(
+ "--version",
+ action="version",
+ version="Charset-Normalizer {} - Python {} - Unicode {} - SpeedUp {}".format(
+ __version__,
+ python_version(),
+ unidata_version,
+ "OFF" if md_module.__file__.lower().endswith(".py") else "ON",
+ ),
+ help="Show version information and exit.",
+ )
+
+ args = parser.parse_args(argv)
+
+ if args.replace is True and args.normalize is False:
+ if args.files:
+ for my_file in args.files:
+ my_file.close()
+ print("Use --replace in addition of --normalize only.", file=sys.stderr)
+ return 1
+
+ if args.force is True and args.replace is False:
+ if args.files:
+ for my_file in args.files:
+ my_file.close()
+ print("Use --force in addition of --replace only.", file=sys.stderr)
+ return 1
+
+ if args.threshold < 0.0 or args.threshold > 1.0:
+ if args.files:
+ for my_file in args.files:
+ my_file.close()
+ print("--threshold VALUE should be between 0. AND 1.", file=sys.stderr)
+ return 1
+
+ x_ = []
+
+ for my_file in args.files:
+ matches = from_fp(
+ my_file,
+ threshold=args.threshold,
+ explain=args.verbose,
+ preemptive_behaviour=args.no_preemptive is False,
+ )
+
+ best_guess = matches.best()
+
+ if best_guess is None:
+ print(
+ 'Unable to identify originating encoding for "{}". {}'.format(
+ my_file.name,
+ (
+ "Maybe try increasing maximum amount of chaos."
+ if args.threshold < 1.0
+ else ""
+ ),
+ ),
+ file=sys.stderr,
+ )
+ x_.append(
+ CliDetectionResult(
+ abspath(my_file.name),
+ None,
+ [],
+ [],
+ "Unknown",
+ [],
+ False,
+ 1.0,
+ 0.0,
+ None,
+ True,
+ )
+ )
+ else:
+ x_.append(
+ CliDetectionResult(
+ abspath(my_file.name),
+ best_guess.encoding,
+ best_guess.encoding_aliases,
+ [
+ cp
+ for cp in best_guess.could_be_from_charset
+ if cp != best_guess.encoding
+ ],
+ best_guess.language,
+ best_guess.alphabets,
+ best_guess.bom,
+ best_guess.percent_chaos,
+ best_guess.percent_coherence,
+ None,
+ True,
+ )
+ )
+
+ if len(matches) > 1 and args.alternatives:
+ for el in matches:
+ if el != best_guess:
+ x_.append(
+ CliDetectionResult(
+ abspath(my_file.name),
+ el.encoding,
+ el.encoding_aliases,
+ [
+ cp
+ for cp in el.could_be_from_charset
+ if cp != el.encoding
+ ],
+ el.language,
+ el.alphabets,
+ el.bom,
+ el.percent_chaos,
+ el.percent_coherence,
+ None,
+ False,
+ )
+ )
+
+ if args.normalize is True:
+ if best_guess.encoding.startswith("utf") is True:
+ print(
+ '"{}" file does not need to be normalized, as it already came from unicode.'.format(
+ my_file.name
+ ),
+ file=sys.stderr,
+ )
+ if my_file.closed is False:
+ my_file.close()
+ continue
+
+ dir_path = dirname(realpath(my_file.name))
+ file_name = basename(realpath(my_file.name))
+
+ o_: List[str] = file_name.split(".")
+
+ if args.replace is False:
+ o_.insert(-1, best_guess.encoding)
+ if my_file.closed is False:
+ my_file.close()
+ elif (
+ args.force is False
+ and query_yes_no(
+                    'Are you sure you want to normalize "{}" by replacing it?'.format(
+ my_file.name
+ ),
+ "no",
+ )
+ is False
+ ):
+ if my_file.closed is False:
+ my_file.close()
+ continue
+
+ try:
+ x_[0].unicode_path = join(dir_path, ".".join(o_))
+
+ with open(x_[0].unicode_path, "wb") as fp:
+ fp.write(best_guess.output())
+ except IOError as e:
+ print(str(e), file=sys.stderr)
+ if my_file.closed is False:
+ my_file.close()
+ return 2
+
+ if my_file.closed is False:
+ my_file.close()
+
+ if args.minimal is False:
+ print(
+ dumps(
+ [el.__dict__ for el in x_] if len(x_) > 1 else x_[0].__dict__,
+ ensure_ascii=True,
+ indent=4,
+ )
+ )
+ else:
+ for my_file in args.files:
+ print(
+ ", ".join(
+ [
+ el.encoding or "undefined"
+ for el in x_
+ if el.path == abspath(my_file.name)
+ ]
+ )
+ )
+
+ return 0
+
+
+if __name__ == "__main__":
+ cli_detect()
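+
+# Illustrative only: one plausible invocation of this module (assuming a file
+# named sample.txt exists), printing only the detected charset per file:
+#
+#   python -m charset_normalizer.cli sample.txt --minimal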
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/constant.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/constant.py
new file mode 100644
index 0000000..f8f2a81
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/constant.py
@@ -0,0 +1,1997 @@
+# -*- coding: utf-8 -*-
+from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE, BOM_UTF32_BE, BOM_UTF32_LE
+from encodings.aliases import aliases
+from re import IGNORECASE, compile as re_compile
+from typing import Dict, List, Set, Union
+
+# Contains, for each eligible encoding, its SIG/BOM bytes (a single item or a list of marks)
+ENCODING_MARKS: Dict[str, Union[bytes, List[bytes]]] = {
+ "utf_8": BOM_UTF8,
+ "utf_7": [
+ b"\x2b\x2f\x76\x38",
+ b"\x2b\x2f\x76\x39",
+ b"\x2b\x2f\x76\x2b",
+ b"\x2b\x2f\x76\x2f",
+ b"\x2b\x2f\x76\x38\x2d",
+ ],
+ "gb18030": b"\x84\x31\x95\x33",
+ "utf_32": [BOM_UTF32_BE, BOM_UTF32_LE],
+ "utf_16": [BOM_UTF16_BE, BOM_UTF16_LE],
+}
+
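+# Illustrative only: marks are matched as byte prefixes, e.g. the UTF-8 BOM:
+#
+#   >>> ENCODING_MARKS["utf_8"]
+#   b'\xef\xbb\xbf'
+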
+TOO_SMALL_SEQUENCE: int = 32
+TOO_BIG_SEQUENCE: int = int(10e6)
+
+UTF8_MAXIMAL_ALLOCATION: int = 1_112_064
+
+# Up-to-date Unicode ucd/15.0.0
+UNICODE_RANGES_COMBINED: Dict[str, range] = {
+ "Control character": range(32),
+ "Basic Latin": range(32, 128),
+ "Latin-1 Supplement": range(128, 256),
+ "Latin Extended-A": range(256, 384),
+ "Latin Extended-B": range(384, 592),
+ "IPA Extensions": range(592, 688),
+ "Spacing Modifier Letters": range(688, 768),
+ "Combining Diacritical Marks": range(768, 880),
+ "Greek and Coptic": range(880, 1024),
+ "Cyrillic": range(1024, 1280),
+ "Cyrillic Supplement": range(1280, 1328),
+ "Armenian": range(1328, 1424),
+ "Hebrew": range(1424, 1536),
+ "Arabic": range(1536, 1792),
+ "Syriac": range(1792, 1872),
+ "Arabic Supplement": range(1872, 1920),
+ "Thaana": range(1920, 1984),
+ "NKo": range(1984, 2048),
+ "Samaritan": range(2048, 2112),
+ "Mandaic": range(2112, 2144),
+ "Syriac Supplement": range(2144, 2160),
+ "Arabic Extended-B": range(2160, 2208),
+ "Arabic Extended-A": range(2208, 2304),
+ "Devanagari": range(2304, 2432),
+ "Bengali": range(2432, 2560),
+ "Gurmukhi": range(2560, 2688),
+ "Gujarati": range(2688, 2816),
+ "Oriya": range(2816, 2944),
+ "Tamil": range(2944, 3072),
+ "Telugu": range(3072, 3200),
+ "Kannada": range(3200, 3328),
+ "Malayalam": range(3328, 3456),
+ "Sinhala": range(3456, 3584),
+ "Thai": range(3584, 3712),
+ "Lao": range(3712, 3840),
+ "Tibetan": range(3840, 4096),
+ "Myanmar": range(4096, 4256),
+ "Georgian": range(4256, 4352),
+ "Hangul Jamo": range(4352, 4608),
+ "Ethiopic": range(4608, 4992),
+ "Ethiopic Supplement": range(4992, 5024),
+ "Cherokee": range(5024, 5120),
+ "Unified Canadian Aboriginal Syllabics": range(5120, 5760),
+ "Ogham": range(5760, 5792),
+ "Runic": range(5792, 5888),
+ "Tagalog": range(5888, 5920),
+ "Hanunoo": range(5920, 5952),
+ "Buhid": range(5952, 5984),
+ "Tagbanwa": range(5984, 6016),
+ "Khmer": range(6016, 6144),
+ "Mongolian": range(6144, 6320),
+ "Unified Canadian Aboriginal Syllabics Extended": range(6320, 6400),
+ "Limbu": range(6400, 6480),
+ "Tai Le": range(6480, 6528),
+ "New Tai Lue": range(6528, 6624),
+ "Khmer Symbols": range(6624, 6656),
+ "Buginese": range(6656, 6688),
+ "Tai Tham": range(6688, 6832),
+ "Combining Diacritical Marks Extended": range(6832, 6912),
+ "Balinese": range(6912, 7040),
+ "Sundanese": range(7040, 7104),
+ "Batak": range(7104, 7168),
+ "Lepcha": range(7168, 7248),
+ "Ol Chiki": range(7248, 7296),
+ "Cyrillic Extended-C": range(7296, 7312),
+ "Georgian Extended": range(7312, 7360),
+ "Sundanese Supplement": range(7360, 7376),
+ "Vedic Extensions": range(7376, 7424),
+ "Phonetic Extensions": range(7424, 7552),
+ "Phonetic Extensions Supplement": range(7552, 7616),
+ "Combining Diacritical Marks Supplement": range(7616, 7680),
+ "Latin Extended Additional": range(7680, 7936),
+ "Greek Extended": range(7936, 8192),
+ "General Punctuation": range(8192, 8304),
+ "Superscripts and Subscripts": range(8304, 8352),
+ "Currency Symbols": range(8352, 8400),
+ "Combining Diacritical Marks for Symbols": range(8400, 8448),
+ "Letterlike Symbols": range(8448, 8528),
+ "Number Forms": range(8528, 8592),
+ "Arrows": range(8592, 8704),
+ "Mathematical Operators": range(8704, 8960),
+ "Miscellaneous Technical": range(8960, 9216),
+ "Control Pictures": range(9216, 9280),
+ "Optical Character Recognition": range(9280, 9312),
+ "Enclosed Alphanumerics": range(9312, 9472),
+ "Box Drawing": range(9472, 9600),
+ "Block Elements": range(9600, 9632),
+ "Geometric Shapes": range(9632, 9728),
+ "Miscellaneous Symbols": range(9728, 9984),
+ "Dingbats": range(9984, 10176),
+ "Miscellaneous Mathematical Symbols-A": range(10176, 10224),
+ "Supplemental Arrows-A": range(10224, 10240),
+ "Braille Patterns": range(10240, 10496),
+ "Supplemental Arrows-B": range(10496, 10624),
+ "Miscellaneous Mathematical Symbols-B": range(10624, 10752),
+ "Supplemental Mathematical Operators": range(10752, 11008),
+ "Miscellaneous Symbols and Arrows": range(11008, 11264),
+ "Glagolitic": range(11264, 11360),
+ "Latin Extended-C": range(11360, 11392),
+ "Coptic": range(11392, 11520),
+ "Georgian Supplement": range(11520, 11568),
+ "Tifinagh": range(11568, 11648),
+ "Ethiopic Extended": range(11648, 11744),
+ "Cyrillic Extended-A": range(11744, 11776),
+ "Supplemental Punctuation": range(11776, 11904),
+ "CJK Radicals Supplement": range(11904, 12032),
+ "Kangxi Radicals": range(12032, 12256),
+ "Ideographic Description Characters": range(12272, 12288),
+ "CJK Symbols and Punctuation": range(12288, 12352),
+ "Hiragana": range(12352, 12448),
+ "Katakana": range(12448, 12544),
+ "Bopomofo": range(12544, 12592),
+ "Hangul Compatibility Jamo": range(12592, 12688),
+ "Kanbun": range(12688, 12704),
+ "Bopomofo Extended": range(12704, 12736),
+ "CJK Strokes": range(12736, 12784),
+ "Katakana Phonetic Extensions": range(12784, 12800),
+ "Enclosed CJK Letters and Months": range(12800, 13056),
+ "CJK Compatibility": range(13056, 13312),
+ "CJK Unified Ideographs Extension A": range(13312, 19904),
+ "Yijing Hexagram Symbols": range(19904, 19968),
+ "CJK Unified Ideographs": range(19968, 40960),
+ "Yi Syllables": range(40960, 42128),
+ "Yi Radicals": range(42128, 42192),
+ "Lisu": range(42192, 42240),
+ "Vai": range(42240, 42560),
+ "Cyrillic Extended-B": range(42560, 42656),
+ "Bamum": range(42656, 42752),
+ "Modifier Tone Letters": range(42752, 42784),
+ "Latin Extended-D": range(42784, 43008),
+ "Syloti Nagri": range(43008, 43056),
+ "Common Indic Number Forms": range(43056, 43072),
+ "Phags-pa": range(43072, 43136),
+ "Saurashtra": range(43136, 43232),
+ "Devanagari Extended": range(43232, 43264),
+ "Kayah Li": range(43264, 43312),
+ "Rejang": range(43312, 43360),
+ "Hangul Jamo Extended-A": range(43360, 43392),
+ "Javanese": range(43392, 43488),
+ "Myanmar Extended-B": range(43488, 43520),
+ "Cham": range(43520, 43616),
+ "Myanmar Extended-A": range(43616, 43648),
+ "Tai Viet": range(43648, 43744),
+ "Meetei Mayek Extensions": range(43744, 43776),
+ "Ethiopic Extended-A": range(43776, 43824),
+ "Latin Extended-E": range(43824, 43888),
+ "Cherokee Supplement": range(43888, 43968),
+ "Meetei Mayek": range(43968, 44032),
+ "Hangul Syllables": range(44032, 55216),
+ "Hangul Jamo Extended-B": range(55216, 55296),
+ "High Surrogates": range(55296, 56192),
+ "High Private Use Surrogates": range(56192, 56320),
+ "Low Surrogates": range(56320, 57344),
+ "Private Use Area": range(57344, 63744),
+ "CJK Compatibility Ideographs": range(63744, 64256),
+ "Alphabetic Presentation Forms": range(64256, 64336),
+ "Arabic Presentation Forms-A": range(64336, 65024),
+ "Variation Selectors": range(65024, 65040),
+ "Vertical Forms": range(65040, 65056),
+ "Combining Half Marks": range(65056, 65072),
+ "CJK Compatibility Forms": range(65072, 65104),
+ "Small Form Variants": range(65104, 65136),
+ "Arabic Presentation Forms-B": range(65136, 65280),
+ "Halfwidth and Fullwidth Forms": range(65280, 65520),
+ "Specials": range(65520, 65536),
+ "Linear B Syllabary": range(65536, 65664),
+ "Linear B Ideograms": range(65664, 65792),
+ "Aegean Numbers": range(65792, 65856),
+ "Ancient Greek Numbers": range(65856, 65936),
+ "Ancient Symbols": range(65936, 66000),
+ "Phaistos Disc": range(66000, 66048),
+ "Lycian": range(66176, 66208),
+ "Carian": range(66208, 66272),
+ "Coptic Epact Numbers": range(66272, 66304),
+ "Old Italic": range(66304, 66352),
+ "Gothic": range(66352, 66384),
+ "Old Permic": range(66384, 66432),
+ "Ugaritic": range(66432, 66464),
+ "Old Persian": range(66464, 66528),
+ "Deseret": range(66560, 66640),
+ "Shavian": range(66640, 66688),
+ "Osmanya": range(66688, 66736),
+ "Osage": range(66736, 66816),
+ "Elbasan": range(66816, 66864),
+ "Caucasian Albanian": range(66864, 66928),
+ "Vithkuqi": range(66928, 67008),
+ "Linear A": range(67072, 67456),
+ "Latin Extended-F": range(67456, 67520),
+ "Cypriot Syllabary": range(67584, 67648),
+ "Imperial Aramaic": range(67648, 67680),
+ "Palmyrene": range(67680, 67712),
+ "Nabataean": range(67712, 67760),
+ "Hatran": range(67808, 67840),
+ "Phoenician": range(67840, 67872),
+ "Lydian": range(67872, 67904),
+ "Meroitic Hieroglyphs": range(67968, 68000),
+ "Meroitic Cursive": range(68000, 68096),
+ "Kharoshthi": range(68096, 68192),
+ "Old South Arabian": range(68192, 68224),
+ "Old North Arabian": range(68224, 68256),
+ "Manichaean": range(68288, 68352),
+ "Avestan": range(68352, 68416),
+ "Inscriptional Parthian": range(68416, 68448),
+ "Inscriptional Pahlavi": range(68448, 68480),
+ "Psalter Pahlavi": range(68480, 68528),
+ "Old Turkic": range(68608, 68688),
+ "Old Hungarian": range(68736, 68864),
+ "Hanifi Rohingya": range(68864, 68928),
+ "Rumi Numeral Symbols": range(69216, 69248),
+ "Yezidi": range(69248, 69312),
+ "Arabic Extended-C": range(69312, 69376),
+ "Old Sogdian": range(69376, 69424),
+ "Sogdian": range(69424, 69488),
+ "Old Uyghur": range(69488, 69552),
+ "Chorasmian": range(69552, 69600),
+ "Elymaic": range(69600, 69632),
+ "Brahmi": range(69632, 69760),
+ "Kaithi": range(69760, 69840),
+ "Sora Sompeng": range(69840, 69888),
+ "Chakma": range(69888, 69968),
+ "Mahajani": range(69968, 70016),
+ "Sharada": range(70016, 70112),
+ "Sinhala Archaic Numbers": range(70112, 70144),
+ "Khojki": range(70144, 70224),
+ "Multani": range(70272, 70320),
+ "Khudawadi": range(70320, 70400),
+ "Grantha": range(70400, 70528),
+ "Newa": range(70656, 70784),
+ "Tirhuta": range(70784, 70880),
+ "Siddham": range(71040, 71168),
+ "Modi": range(71168, 71264),
+ "Mongolian Supplement": range(71264, 71296),
+ "Takri": range(71296, 71376),
+ "Ahom": range(71424, 71504),
+ "Dogra": range(71680, 71760),
+ "Warang Citi": range(71840, 71936),
+ "Dives Akuru": range(71936, 72032),
+ "Nandinagari": range(72096, 72192),
+ "Zanabazar Square": range(72192, 72272),
+ "Soyombo": range(72272, 72368),
+ "Unified Canadian Aboriginal Syllabics Extended-A": range(72368, 72384),
+ "Pau Cin Hau": range(72384, 72448),
+ "Devanagari Extended-A": range(72448, 72544),
+ "Bhaiksuki": range(72704, 72816),
+ "Marchen": range(72816, 72896),
+ "Masaram Gondi": range(72960, 73056),
+ "Gunjala Gondi": range(73056, 73136),
+ "Makasar": range(73440, 73472),
+ "Kawi": range(73472, 73568),
+ "Lisu Supplement": range(73648, 73664),
+ "Tamil Supplement": range(73664, 73728),
+ "Cuneiform": range(73728, 74752),
+ "Cuneiform Numbers and Punctuation": range(74752, 74880),
+ "Early Dynastic Cuneiform": range(74880, 75088),
+ "Cypro-Minoan": range(77712, 77824),
+ "Egyptian Hieroglyphs": range(77824, 78896),
+ "Egyptian Hieroglyph Format Controls": range(78896, 78944),
+ "Anatolian Hieroglyphs": range(82944, 83584),
+ "Bamum Supplement": range(92160, 92736),
+ "Mro": range(92736, 92784),
+ "Tangsa": range(92784, 92880),
+ "Bassa Vah": range(92880, 92928),
+ "Pahawh Hmong": range(92928, 93072),
+ "Medefaidrin": range(93760, 93856),
+ "Miao": range(93952, 94112),
+ "Ideographic Symbols and Punctuation": range(94176, 94208),
+ "Tangut": range(94208, 100352),
+ "Tangut Components": range(100352, 101120),
+ "Khitan Small Script": range(101120, 101632),
+ "Tangut Supplement": range(101632, 101760),
+ "Kana Extended-B": range(110576, 110592),
+ "Kana Supplement": range(110592, 110848),
+ "Kana Extended-A": range(110848, 110896),
+ "Small Kana Extension": range(110896, 110960),
+ "Nushu": range(110960, 111360),
+ "Duployan": range(113664, 113824),
+ "Shorthand Format Controls": range(113824, 113840),
+ "Znamenny Musical Notation": range(118528, 118736),
+ "Byzantine Musical Symbols": range(118784, 119040),
+ "Musical Symbols": range(119040, 119296),
+ "Ancient Greek Musical Notation": range(119296, 119376),
+ "Kaktovik Numerals": range(119488, 119520),
+ "Mayan Numerals": range(119520, 119552),
+ "Tai Xuan Jing Symbols": range(119552, 119648),
+ "Counting Rod Numerals": range(119648, 119680),
+ "Mathematical Alphanumeric Symbols": range(119808, 120832),
+ "Sutton SignWriting": range(120832, 121520),
+ "Latin Extended-G": range(122624, 122880),
+ "Glagolitic Supplement": range(122880, 122928),
+ "Cyrillic Extended-D": range(122928, 123024),
+ "Nyiakeng Puachue Hmong": range(123136, 123216),
+ "Toto": range(123536, 123584),
+ "Wancho": range(123584, 123648),
+ "Nag Mundari": range(124112, 124160),
+ "Ethiopic Extended-B": range(124896, 124928),
+ "Mende Kikakui": range(124928, 125152),
+ "Adlam": range(125184, 125280),
+ "Indic Siyaq Numbers": range(126064, 126144),
+ "Ottoman Siyaq Numbers": range(126208, 126288),
+ "Arabic Mathematical Alphabetic Symbols": range(126464, 126720),
+ "Mahjong Tiles": range(126976, 127024),
+ "Domino Tiles": range(127024, 127136),
+ "Playing Cards": range(127136, 127232),
+ "Enclosed Alphanumeric Supplement": range(127232, 127488),
+ "Enclosed Ideographic Supplement": range(127488, 127744),
+ "Miscellaneous Symbols and Pictographs": range(127744, 128512),
+ "Emoticons range(Emoji)": range(128512, 128592),
+ "Ornamental Dingbats": range(128592, 128640),
+ "Transport and Map Symbols": range(128640, 128768),
+ "Alchemical Symbols": range(128768, 128896),
+ "Geometric Shapes Extended": range(128896, 129024),
+ "Supplemental Arrows-C": range(129024, 129280),
+ "Supplemental Symbols and Pictographs": range(129280, 129536),
+ "Chess Symbols": range(129536, 129648),
+ "Symbols and Pictographs Extended-A": range(129648, 129792),
+ "Symbols for Legacy Computing": range(129792, 130048),
+ "CJK Unified Ideographs Extension B": range(131072, 173792),
+ "CJK Unified Ideographs Extension C": range(173824, 177984),
+ "CJK Unified Ideographs Extension D": range(177984, 178208),
+ "CJK Unified Ideographs Extension E": range(178208, 183984),
+ "CJK Unified Ideographs Extension F": range(183984, 191472),
+ "CJK Compatibility Ideographs Supplement": range(194560, 195104),
+ "CJK Unified Ideographs Extension G": range(196608, 201552),
+ "CJK Unified Ideographs Extension H": range(201552, 205744),
+ "Tags": range(917504, 917632),
+ "Variation Selectors Supplement": range(917760, 918000),
+ "Supplementary Private Use Area-A": range(983040, 1048576),
+ "Supplementary Private Use Area-B": range(1048576, 1114112),
+}
+
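+# Illustrative only: resolving a character to its range is a containment test
+# over this table, e.g. ord("Я") == 1071 falls within range(1024, 1280), i.e.
+# the "Cyrillic" block.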
+
+UNICODE_SECONDARY_RANGE_KEYWORD: List[str] = [
+ "Supplement",
+ "Extended",
+ "Extensions",
+ "Modifier",
+ "Marks",
+ "Punctuation",
+ "Symbols",
+ "Forms",
+ "Operators",
+ "Miscellaneous",
+ "Drawing",
+ "Block",
+ "Shapes",
+ "Supplemental",
+ "Tags",
+]
+
+RE_POSSIBLE_ENCODING_INDICATION = re_compile(
+ r"(?:(?:encoding)|(?:charset)|(?:coding))(?:[\:= ]{1,10})(?:[\"\']?)([a-zA-Z0-9\-_]+)(?:[\"\']?)",
+ IGNORECASE,
+)
+
+IANA_NO_ALIASES = [
+ "cp720",
+ "cp737",
+ "cp856",
+ "cp874",
+ "cp875",
+ "cp1006",
+ "koi8_r",
+ "koi8_t",
+ "koi8_u",
+]
+
+IANA_SUPPORTED: List[str] = sorted(
+ filter(
+ lambda x: x.endswith("_codec") is False
+ and x not in {"rot_13", "tactis", "mbcs"},
+ list(set(aliases.values())) + IANA_NO_ALIASES,
+ )
+)
+
+IANA_SUPPORTED_COUNT: int = len(IANA_SUPPORTED)
+
+# Pre-computed code pages that are similar, as determined by the function cp_similarity.
+IANA_SUPPORTED_SIMILAR: Dict[str, List[str]] = {
+ "cp037": ["cp1026", "cp1140", "cp273", "cp500"],
+ "cp1026": ["cp037", "cp1140", "cp273", "cp500"],
+ "cp1125": ["cp866"],
+ "cp1140": ["cp037", "cp1026", "cp273", "cp500"],
+ "cp1250": ["iso8859_2"],
+ "cp1251": ["kz1048", "ptcp154"],
+ "cp1252": ["iso8859_15", "iso8859_9", "latin_1"],
+ "cp1253": ["iso8859_7"],
+ "cp1254": ["iso8859_15", "iso8859_9", "latin_1"],
+ "cp1257": ["iso8859_13"],
+ "cp273": ["cp037", "cp1026", "cp1140", "cp500"],
+ "cp437": ["cp850", "cp858", "cp860", "cp861", "cp862", "cp863", "cp865"],
+ "cp500": ["cp037", "cp1026", "cp1140", "cp273"],
+ "cp850": ["cp437", "cp857", "cp858", "cp865"],
+ "cp857": ["cp850", "cp858", "cp865"],
+ "cp858": ["cp437", "cp850", "cp857", "cp865"],
+ "cp860": ["cp437", "cp861", "cp862", "cp863", "cp865"],
+ "cp861": ["cp437", "cp860", "cp862", "cp863", "cp865"],
+ "cp862": ["cp437", "cp860", "cp861", "cp863", "cp865"],
+ "cp863": ["cp437", "cp860", "cp861", "cp862", "cp865"],
+ "cp865": ["cp437", "cp850", "cp857", "cp858", "cp860", "cp861", "cp862", "cp863"],
+ "cp866": ["cp1125"],
+ "iso8859_10": ["iso8859_14", "iso8859_15", "iso8859_4", "iso8859_9", "latin_1"],
+ "iso8859_11": ["tis_620"],
+ "iso8859_13": ["cp1257"],
+ "iso8859_14": [
+ "iso8859_10",
+ "iso8859_15",
+ "iso8859_16",
+ "iso8859_3",
+ "iso8859_9",
+ "latin_1",
+ ],
+ "iso8859_15": [
+ "cp1252",
+ "cp1254",
+ "iso8859_10",
+ "iso8859_14",
+ "iso8859_16",
+ "iso8859_3",
+ "iso8859_9",
+ "latin_1",
+ ],
+ "iso8859_16": [
+ "iso8859_14",
+ "iso8859_15",
+ "iso8859_2",
+ "iso8859_3",
+ "iso8859_9",
+ "latin_1",
+ ],
+ "iso8859_2": ["cp1250", "iso8859_16", "iso8859_4"],
+ "iso8859_3": ["iso8859_14", "iso8859_15", "iso8859_16", "iso8859_9", "latin_1"],
+ "iso8859_4": ["iso8859_10", "iso8859_2", "iso8859_9", "latin_1"],
+ "iso8859_7": ["cp1253"],
+ "iso8859_9": [
+ "cp1252",
+ "cp1254",
+ "cp1258",
+ "iso8859_10",
+ "iso8859_14",
+ "iso8859_15",
+ "iso8859_16",
+ "iso8859_3",
+ "iso8859_4",
+ "latin_1",
+ ],
+ "kz1048": ["cp1251", "ptcp154"],
+ "latin_1": [
+ "cp1252",
+ "cp1254",
+ "cp1258",
+ "iso8859_10",
+ "iso8859_14",
+ "iso8859_15",
+ "iso8859_16",
+ "iso8859_3",
+ "iso8859_4",
+ "iso8859_9",
+ ],
+ "mac_iceland": ["mac_roman", "mac_turkish"],
+ "mac_roman": ["mac_iceland", "mac_turkish"],
+ "mac_turkish": ["mac_iceland", "mac_roman"],
+ "ptcp154": ["cp1251", "kz1048"],
+ "tis_620": ["iso8859_11"],
+}
+
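+# Illustrative only: "cp1252" and "latin_1" list each other above, so once one
+# of them soft-fails the chaos probing, the other can be skipped as too
+# similar.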
+
+CHARDET_CORRESPONDENCE: Dict[str, str] = {
+ "iso2022_kr": "ISO-2022-KR",
+ "iso2022_jp": "ISO-2022-JP",
+ "euc_kr": "EUC-KR",
+ "tis_620": "TIS-620",
+ "utf_32": "UTF-32",
+ "euc_jp": "EUC-JP",
+ "koi8_r": "KOI8-R",
+ "iso8859_1": "ISO-8859-1",
+ "iso8859_2": "ISO-8859-2",
+ "iso8859_5": "ISO-8859-5",
+ "iso8859_6": "ISO-8859-6",
+ "iso8859_7": "ISO-8859-7",
+ "iso8859_8": "ISO-8859-8",
+ "utf_16": "UTF-16",
+ "cp855": "IBM855",
+ "mac_cyrillic": "MacCyrillic",
+ "gb2312": "GB2312",
+ "gb18030": "GB18030",
+ "cp932": "CP932",
+ "cp866": "IBM866",
+ "utf_8": "utf-8",
+ "utf_8_sig": "UTF-8-SIG",
+ "shift_jis": "SHIFT_JIS",
+ "big5": "Big5",
+ "cp1250": "windows-1250",
+ "cp1251": "windows-1251",
+ "cp1252": "Windows-1252",
+ "cp1253": "windows-1253",
+ "cp1255": "windows-1255",
+ "cp1256": "windows-1256",
+ "cp1254": "Windows-1254",
+ "cp949": "CP949",
+}
+
+
+COMMON_SAFE_ASCII_CHARACTERS: Set[str] = {
+ "<",
+ ">",
+ "=",
+ ":",
+ "/",
+ "&",
+ ";",
+ "{",
+ "}",
+ "[",
+ "]",
+ ",",
+ "|",
+ '"',
+ "-",
+ "(",
+ ")",
+}
+
+
+KO_NAMES: Set[str] = {"johab", "cp949", "euc_kr"}
+ZH_NAMES: Set[str] = {"big5", "cp950", "big5hkscs", "hz"}
+
+# Logging LEVEL below DEBUG
+TRACE: int = 5
+
+
+# Language labels that contain the em dash "—"
+# character are to be considered alternative sequences of the original
+FREQUENCIES: Dict[str, List[str]] = {
+ "English": [
+ "e",
+ "a",
+ "t",
+ "i",
+ "o",
+ "n",
+ "s",
+ "r",
+ "h",
+ "l",
+ "d",
+ "c",
+ "u",
+ "m",
+ "f",
+ "p",
+ "g",
+ "w",
+ "y",
+ "b",
+ "v",
+ "k",
+ "x",
+ "j",
+ "z",
+ "q",
+ ],
+ "English—": [
+ "e",
+ "a",
+ "t",
+ "i",
+ "o",
+ "n",
+ "s",
+ "r",
+ "h",
+ "l",
+ "d",
+ "c",
+ "m",
+ "u",
+ "f",
+ "p",
+ "g",
+ "w",
+ "b",
+ "y",
+ "v",
+ "k",
+ "j",
+ "x",
+ "z",
+ "q",
+ ],
+ "German": [
+ "e",
+ "n",
+ "i",
+ "r",
+ "s",
+ "t",
+ "a",
+ "d",
+ "h",
+ "u",
+ "l",
+ "g",
+ "o",
+ "c",
+ "m",
+ "b",
+ "f",
+ "k",
+ "w",
+ "z",
+ "p",
+ "v",
+ "ü",
+ "ä",
+ "ö",
+ "j",
+ ],
+ "French": [
+ "e",
+ "a",
+ "s",
+ "n",
+ "i",
+ "t",
+ "r",
+ "l",
+ "u",
+ "o",
+ "d",
+ "c",
+ "p",
+ "m",
+ "é",
+ "v",
+ "g",
+ "f",
+ "b",
+ "h",
+ "q",
+ "à",
+ "x",
+ "è",
+ "y",
+ "j",
+ ],
+ "Dutch": [
+ "e",
+ "n",
+ "a",
+ "i",
+ "r",
+ "t",
+ "o",
+ "d",
+ "s",
+ "l",
+ "g",
+ "h",
+ "v",
+ "m",
+ "u",
+ "k",
+ "c",
+ "p",
+ "b",
+ "w",
+ "j",
+ "z",
+ "f",
+ "y",
+ "x",
+ "ë",
+ ],
+ "Italian": [
+ "e",
+ "i",
+ "a",
+ "o",
+ "n",
+ "l",
+ "t",
+ "r",
+ "s",
+ "c",
+ "d",
+ "u",
+ "p",
+ "m",
+ "g",
+ "v",
+ "f",
+ "b",
+ "z",
+ "h",
+ "q",
+ "è",
+ "à",
+ "k",
+ "y",
+ "ò",
+ ],
+ "Polish": [
+ "a",
+ "i",
+ "o",
+ "e",
+ "n",
+ "r",
+ "z",
+ "w",
+ "s",
+ "c",
+ "t",
+ "k",
+ "y",
+ "d",
+ "p",
+ "m",
+ "u",
+ "l",
+ "j",
+ "ł",
+ "g",
+ "b",
+ "h",
+ "ą",
+ "ę",
+ "ó",
+ ],
+ "Spanish": [
+ "e",
+ "a",
+ "o",
+ "n",
+ "s",
+ "r",
+ "i",
+ "l",
+ "d",
+ "t",
+ "c",
+ "u",
+ "m",
+ "p",
+ "b",
+ "g",
+ "v",
+ "f",
+ "y",
+ "ó",
+ "h",
+ "q",
+ "í",
+ "j",
+ "z",
+ "á",
+ ],
+ "Russian": [
+ "о",
+ "а",
+ "е",
+ "и",
+ "н",
+ "с",
+ "т",
+ "р",
+ "в",
+ "л",
+ "к",
+ "м",
+ "д",
+ "п",
+ "у",
+ "г",
+ "я",
+ "ы",
+ "з",
+ "б",
+ "й",
+ "ь",
+ "ч",
+ "х",
+ "ж",
+ "ц",
+ ],
+ # Jap-Kanji
+ "Japanese": [
+ "人",
+ "一",
+ "大",
+ "亅",
+ "丁",
+ "丨",
+ "竹",
+ "笑",
+ "口",
+ "日",
+ "今",
+ "二",
+ "彳",
+ "行",
+ "十",
+ "土",
+ "丶",
+ "寸",
+ "寺",
+ "時",
+ "乙",
+ "丿",
+ "乂",
+ "气",
+ "気",
+ "冂",
+ "巾",
+ "亠",
+ "市",
+ "目",
+ "儿",
+ "見",
+ "八",
+ "小",
+ "凵",
+ "県",
+ "月",
+ "彐",
+ "門",
+ "間",
+ "木",
+ "東",
+ "山",
+ "出",
+ "本",
+ "中",
+ "刀",
+ "分",
+ "耳",
+ "又",
+ "取",
+ "最",
+ "言",
+ "田",
+ "心",
+ "思",
+ "刂",
+ "前",
+ "京",
+ "尹",
+ "事",
+ "生",
+ "厶",
+ "云",
+ "会",
+ "未",
+ "来",
+ "白",
+ "冫",
+ "楽",
+ "灬",
+ "馬",
+ "尸",
+ "尺",
+ "駅",
+ "明",
+ "耂",
+ "者",
+ "了",
+ "阝",
+ "都",
+ "高",
+ "卜",
+ "占",
+ "厂",
+ "广",
+ "店",
+ "子",
+ "申",
+ "奄",
+ "亻",
+ "俺",
+ "上",
+ "方",
+ "冖",
+ "学",
+ "衣",
+ "艮",
+ "食",
+ "自",
+ ],
+ # Jap-Katakana
+ "Japanese—": [
+ "ー",
+ "ン",
+ "ス",
+ "・",
+ "ル",
+ "ト",
+ "リ",
+ "イ",
+ "ア",
+ "ラ",
+ "ッ",
+ "ク",
+ "ド",
+ "シ",
+ "レ",
+ "ジ",
+ "タ",
+ "フ",
+ "ロ",
+ "カ",
+ "テ",
+ "マ",
+ "ィ",
+ "グ",
+ "バ",
+ "ム",
+ "プ",
+ "オ",
+ "コ",
+ "デ",
+ "ニ",
+ "ウ",
+ "メ",
+ "サ",
+ "ビ",
+ "ナ",
+ "ブ",
+ "ャ",
+ "エ",
+ "ュ",
+ "チ",
+ "キ",
+ "ズ",
+ "ダ",
+ "パ",
+ "ミ",
+ "ェ",
+ "ョ",
+ "ハ",
+ "セ",
+ "ベ",
+ "ガ",
+ "モ",
+ "ツ",
+ "ネ",
+ "ボ",
+ "ソ",
+ "ノ",
+ "ァ",
+ "ヴ",
+ "ワ",
+ "ポ",
+ "ペ",
+ "ピ",
+ "ケ",
+ "ゴ",
+ "ギ",
+ "ザ",
+ "ホ",
+ "ゲ",
+ "ォ",
+ "ヤ",
+ "ヒ",
+ "ユ",
+ "ヨ",
+ "ヘ",
+ "ゼ",
+ "ヌ",
+ "ゥ",
+ "ゾ",
+ "ヶ",
+ "ヂ",
+ "ヲ",
+ "ヅ",
+ "ヵ",
+ "ヱ",
+ "ヰ",
+ "ヮ",
+ "ヽ",
+ "゠",
+ "ヾ",
+ "ヷ",
+ "ヿ",
+ "ヸ",
+ "ヹ",
+ "ヺ",
+ ],
+ # Jap-Hiragana
+ "Japanese——": [
+ "の",
+ "に",
+ "る",
+ "た",
+ "と",
+ "は",
+ "し",
+ "い",
+ "を",
+ "で",
+ "て",
+ "が",
+ "な",
+ "れ",
+ "か",
+ "ら",
+ "さ",
+ "っ",
+ "り",
+ "す",
+ "あ",
+ "も",
+ "こ",
+ "ま",
+ "う",
+ "く",
+ "よ",
+ "き",
+ "ん",
+ "め",
+ "お",
+ "け",
+ "そ",
+ "つ",
+ "だ",
+ "や",
+ "え",
+ "ど",
+ "わ",
+ "ち",
+ "み",
+ "せ",
+ "じ",
+ "ば",
+ "へ",
+ "び",
+ "ず",
+ "ろ",
+ "ほ",
+ "げ",
+ "む",
+ "べ",
+ "ひ",
+ "ょ",
+ "ゆ",
+ "ぶ",
+ "ご",
+ "ゃ",
+ "ね",
+ "ふ",
+ "ぐ",
+ "ぎ",
+ "ぼ",
+ "ゅ",
+ "づ",
+ "ざ",
+ "ぞ",
+ "ぬ",
+ "ぜ",
+ "ぱ",
+ "ぽ",
+ "ぷ",
+ "ぴ",
+ "ぃ",
+ "ぁ",
+ "ぇ",
+ "ぺ",
+ "ゞ",
+ "ぢ",
+ "ぉ",
+ "ぅ",
+ "ゐ",
+ "ゝ",
+ "ゑ",
+ "゛",
+ "゜",
+ "ゎ",
+ "ゔ",
+ "゚",
+ "ゟ",
+ "゙",
+ "ゕ",
+ "ゖ",
+ ],
+ "Portuguese": [
+ "a",
+ "e",
+ "o",
+ "s",
+ "i",
+ "r",
+ "d",
+ "n",
+ "t",
+ "m",
+ "u",
+ "c",
+ "l",
+ "p",
+ "g",
+ "v",
+ "b",
+ "f",
+ "h",
+ "ã",
+ "q",
+ "é",
+ "ç",
+ "á",
+ "z",
+ "í",
+ ],
+ "Swedish": [
+ "e",
+ "a",
+ "n",
+ "r",
+ "t",
+ "s",
+ "i",
+ "l",
+ "d",
+ "o",
+ "m",
+ "k",
+ "g",
+ "v",
+ "h",
+ "f",
+ "u",
+ "p",
+ "ä",
+ "c",
+ "b",
+ "ö",
+ "å",
+ "y",
+ "j",
+ "x",
+ ],
+ "Chinese": [
+ "的",
+ "一",
+ "是",
+ "不",
+ "了",
+ "在",
+ "人",
+ "有",
+ "我",
+ "他",
+ "这",
+ "个",
+ "们",
+ "中",
+ "来",
+ "上",
+ "大",
+ "为",
+ "和",
+ "国",
+ "地",
+ "到",
+ "以",
+ "说",
+ "时",
+ "要",
+ "就",
+ "出",
+ "会",
+ "可",
+ "也",
+ "你",
+ "对",
+ "生",
+ "能",
+ "而",
+ "子",
+ "那",
+ "得",
+ "于",
+ "着",
+ "下",
+ "自",
+ "之",
+ "年",
+ "过",
+ "发",
+ "后",
+ "作",
+ "里",
+ "用",
+ "道",
+ "行",
+ "所",
+ "然",
+ "家",
+ "种",
+ "事",
+ "成",
+ "方",
+ "多",
+ "经",
+ "么",
+ "去",
+ "法",
+ "学",
+ "如",
+ "都",
+ "同",
+ "现",
+ "当",
+ "没",
+ "动",
+ "面",
+ "起",
+ "看",
+ "定",
+ "天",
+ "分",
+ "还",
+ "进",
+ "好",
+ "小",
+ "部",
+ "其",
+ "些",
+ "主",
+ "样",
+ "理",
+ "心",
+ "她",
+ "本",
+ "前",
+ "开",
+ "但",
+ "因",
+ "只",
+ "从",
+ "想",
+ "实",
+ ],
+ "Ukrainian": [
+ "о",
+ "а",
+ "н",
+ "і",
+ "и",
+ "р",
+ "в",
+ "т",
+ "е",
+ "с",
+ "к",
+ "л",
+ "у",
+ "д",
+ "м",
+ "п",
+ "з",
+ "я",
+ "ь",
+ "б",
+ "г",
+ "й",
+ "ч",
+ "х",
+ "ц",
+ "ї",
+ ],
+ "Norwegian": [
+ "e",
+ "r",
+ "n",
+ "t",
+ "a",
+ "s",
+ "i",
+ "o",
+ "l",
+ "d",
+ "g",
+ "k",
+ "m",
+ "v",
+ "f",
+ "p",
+ "u",
+ "b",
+ "h",
+ "å",
+ "y",
+ "j",
+ "ø",
+ "c",
+ "æ",
+ "w",
+ ],
+ "Finnish": [
+ "a",
+ "i",
+ "n",
+ "t",
+ "e",
+ "s",
+ "l",
+ "o",
+ "u",
+ "k",
+ "ä",
+ "m",
+ "r",
+ "v",
+ "j",
+ "h",
+ "p",
+ "y",
+ "d",
+ "ö",
+ "g",
+ "c",
+ "b",
+ "f",
+ "w",
+ "z",
+ ],
+ "Vietnamese": [
+ "n",
+ "h",
+ "t",
+ "i",
+ "c",
+ "g",
+ "a",
+ "o",
+ "u",
+ "m",
+ "l",
+ "r",
+ "à",
+ "đ",
+ "s",
+ "e",
+ "v",
+ "p",
+ "b",
+ "y",
+ "ư",
+ "d",
+ "á",
+ "k",
+ "ộ",
+ "ế",
+ ],
+ "Czech": [
+ "o",
+ "e",
+ "a",
+ "n",
+ "t",
+ "s",
+ "i",
+ "l",
+ "v",
+ "r",
+ "k",
+ "d",
+ "u",
+ "m",
+ "p",
+ "í",
+ "c",
+ "h",
+ "z",
+ "á",
+ "y",
+ "j",
+ "b",
+ "ě",
+ "é",
+ "ř",
+ ],
+ "Hungarian": [
+ "e",
+ "a",
+ "t",
+ "l",
+ "s",
+ "n",
+ "k",
+ "r",
+ "i",
+ "o",
+ "z",
+ "á",
+ "é",
+ "g",
+ "m",
+ "b",
+ "y",
+ "v",
+ "d",
+ "h",
+ "u",
+ "p",
+ "j",
+ "ö",
+ "f",
+ "c",
+ ],
+ "Korean": [
+ "이",
+ "다",
+ "에",
+ "의",
+ "는",
+ "로",
+ "하",
+ "을",
+ "가",
+ "고",
+ "지",
+ "서",
+ "한",
+ "은",
+ "기",
+ "으",
+ "년",
+ "대",
+ "사",
+ "시",
+ "를",
+ "리",
+ "도",
+ "인",
+ "스",
+ "일",
+ ],
+ "Indonesian": [
+ "a",
+ "n",
+ "e",
+ "i",
+ "r",
+ "t",
+ "u",
+ "s",
+ "d",
+ "k",
+ "m",
+ "l",
+ "g",
+ "p",
+ "b",
+ "o",
+ "h",
+ "y",
+ "j",
+ "c",
+ "w",
+ "f",
+ "v",
+ "z",
+ "x",
+ "q",
+ ],
+ "Turkish": [
+ "a",
+ "e",
+ "i",
+ "n",
+ "r",
+ "l",
+ "ı",
+ "k",
+ "d",
+ "t",
+ "s",
+ "m",
+ "y",
+ "u",
+ "o",
+ "b",
+ "ü",
+ "ş",
+ "v",
+ "g",
+ "z",
+ "h",
+ "c",
+ "p",
+ "ç",
+ "ğ",
+ ],
+ "Romanian": [
+ "e",
+ "i",
+ "a",
+ "r",
+ "n",
+ "t",
+ "u",
+ "l",
+ "o",
+ "c",
+ "s",
+ "d",
+ "p",
+ "m",
+ "ă",
+ "f",
+ "v",
+ "î",
+ "g",
+ "b",
+ "ș",
+ "ț",
+ "z",
+ "h",
+ "â",
+ "j",
+ ],
+ "Farsi": [
+ "ا",
+ "ی",
+ "ر",
+ "د",
+ "ن",
+ "ه",
+ "و",
+ "م",
+ "ت",
+ "ب",
+ "س",
+ "ل",
+ "ک",
+ "ش",
+ "ز",
+ "ف",
+ "گ",
+ "ع",
+ "خ",
+ "ق",
+ "ج",
+ "آ",
+ "پ",
+ "ح",
+ "ط",
+ "ص",
+ ],
+ "Arabic": [
+ "ا",
+ "ل",
+ "ي",
+ "م",
+ "و",
+ "ن",
+ "ر",
+ "ت",
+ "ب",
+ "ة",
+ "ع",
+ "د",
+ "س",
+ "ف",
+ "ه",
+ "ك",
+ "ق",
+ "أ",
+ "ح",
+ "ج",
+ "ش",
+ "ط",
+ "ص",
+ "ى",
+ "خ",
+ "إ",
+ ],
+ "Danish": [
+ "e",
+ "r",
+ "n",
+ "t",
+ "a",
+ "i",
+ "s",
+ "d",
+ "l",
+ "o",
+ "g",
+ "m",
+ "k",
+ "f",
+ "v",
+ "u",
+ "b",
+ "h",
+ "p",
+ "å",
+ "y",
+ "ø",
+ "æ",
+ "c",
+ "j",
+ "w",
+ ],
+ "Serbian": [
+ "а",
+ "и",
+ "о",
+ "е",
+ "н",
+ "р",
+ "с",
+ "у",
+ "т",
+ "к",
+ "ј",
+ "в",
+ "д",
+ "м",
+ "п",
+ "л",
+ "г",
+ "з",
+ "б",
+ "a",
+ "i",
+ "e",
+ "o",
+ "n",
+ "ц",
+ "ш",
+ ],
+ "Lithuanian": [
+ "i",
+ "a",
+ "s",
+ "o",
+ "r",
+ "e",
+ "t",
+ "n",
+ "u",
+ "k",
+ "m",
+ "l",
+ "p",
+ "v",
+ "d",
+ "j",
+ "g",
+ "ė",
+ "b",
+ "y",
+ "ų",
+ "š",
+ "ž",
+ "c",
+ "ą",
+ "į",
+ ],
+ "Slovene": [
+ "e",
+ "a",
+ "i",
+ "o",
+ "n",
+ "r",
+ "s",
+ "l",
+ "t",
+ "j",
+ "v",
+ "k",
+ "d",
+ "p",
+ "m",
+ "u",
+ "z",
+ "b",
+ "g",
+ "h",
+ "č",
+ "c",
+ "š",
+ "ž",
+ "f",
+ "y",
+ ],
+ "Slovak": [
+ "o",
+ "a",
+ "e",
+ "n",
+ "i",
+ "r",
+ "v",
+ "t",
+ "s",
+ "l",
+ "k",
+ "d",
+ "m",
+ "p",
+ "u",
+ "c",
+ "h",
+ "j",
+ "b",
+ "z",
+ "á",
+ "y",
+ "ý",
+ "í",
+ "č",
+ "é",
+ ],
+ "Hebrew": [
+ "י",
+ "ו",
+ "ה",
+ "ל",
+ "ר",
+ "ב",
+ "ת",
+ "מ",
+ "א",
+ "ש",
+ "נ",
+ "ע",
+ "ם",
+ "ד",
+ "ק",
+ "ח",
+ "פ",
+ "ס",
+ "כ",
+ "ג",
+ "ט",
+ "צ",
+ "ן",
+ "ז",
+ "ך",
+ ],
+ "Bulgarian": [
+ "а",
+ "и",
+ "о",
+ "е",
+ "н",
+ "т",
+ "р",
+ "с",
+ "в",
+ "л",
+ "к",
+ "д",
+ "п",
+ "м",
+ "з",
+ "г",
+ "я",
+ "ъ",
+ "у",
+ "б",
+ "ч",
+ "ц",
+ "й",
+ "ж",
+ "щ",
+ "х",
+ ],
+ "Croatian": [
+ "a",
+ "i",
+ "o",
+ "e",
+ "n",
+ "r",
+ "j",
+ "s",
+ "t",
+ "u",
+ "k",
+ "l",
+ "v",
+ "d",
+ "m",
+ "p",
+ "g",
+ "z",
+ "b",
+ "c",
+ "č",
+ "h",
+ "š",
+ "ž",
+ "ć",
+ "f",
+ ],
+ "Hindi": [
+ "क",
+ "र",
+ "स",
+ "न",
+ "त",
+ "म",
+ "ह",
+ "प",
+ "य",
+ "ल",
+ "व",
+ "ज",
+ "द",
+ "ग",
+ "ब",
+ "श",
+ "ट",
+ "अ",
+ "ए",
+ "थ",
+ "भ",
+ "ड",
+ "च",
+ "ध",
+ "ष",
+ "इ",
+ ],
+ "Estonian": [
+ "a",
+ "i",
+ "e",
+ "s",
+ "t",
+ "l",
+ "u",
+ "n",
+ "o",
+ "k",
+ "r",
+ "d",
+ "m",
+ "v",
+ "g",
+ "p",
+ "j",
+ "h",
+ "ä",
+ "b",
+ "õ",
+ "ü",
+ "f",
+ "c",
+ "ö",
+ "y",
+ ],
+ "Thai": [
+ "า",
+ "น",
+ "ร",
+ "อ",
+ "ก",
+ "เ",
+ "ง",
+ "ม",
+ "ย",
+ "ล",
+ "ว",
+ "ด",
+ "ท",
+ "ส",
+ "ต",
+ "ะ",
+ "ป",
+ "บ",
+ "ค",
+ "ห",
+ "แ",
+ "จ",
+ "พ",
+ "ช",
+ "ข",
+ "ใ",
+ ],
+ "Greek": [
+ "α",
+ "τ",
+ "ο",
+ "ι",
+ "ε",
+ "ν",
+ "ρ",
+ "σ",
+ "κ",
+ "η",
+ "π",
+ "ς",
+ "υ",
+ "μ",
+ "λ",
+ "ί",
+ "ό",
+ "ά",
+ "γ",
+ "έ",
+ "δ",
+ "ή",
+ "ω",
+ "χ",
+ "θ",
+ "ύ",
+ ],
+ "Tamil": [
+ "க",
+ "த",
+ "ப",
+ "ட",
+ "ர",
+ "ம",
+ "ல",
+ "ன",
+ "வ",
+ "ற",
+ "ய",
+ "ள",
+ "ச",
+ "ந",
+ "இ",
+ "ண",
+ "அ",
+ "ஆ",
+ "ழ",
+ "ங",
+ "எ",
+ "உ",
+ "ஒ",
+ "ஸ",
+ ],
+ "Kazakh": [
+ "а",
+ "ы",
+ "е",
+ "н",
+ "т",
+ "р",
+ "л",
+ "і",
+ "д",
+ "с",
+ "м",
+ "қ",
+ "к",
+ "о",
+ "б",
+ "и",
+ "у",
+ "ғ",
+ "ж",
+ "ң",
+ "з",
+ "ш",
+ "й",
+ "п",
+ "г",
+ "ө",
+ ],
+}
+
+LANGUAGE_SUPPORTED_COUNT: int = len(FREQUENCIES)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/legacy.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/legacy.py
new file mode 100644
index 0000000..3f6d490
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/legacy.py
@@ -0,0 +1,65 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Optional
+from warnings import warn
+
+from .api import from_bytes
+from .constant import CHARDET_CORRESPONDENCE
+
+# TODO: remove this check when dropping Python 3.7 support
+if TYPE_CHECKING:
+ from typing_extensions import TypedDict
+
+ class ResultDict(TypedDict):
+ encoding: Optional[str]
+ language: str
+ confidence: Optional[float]
+
+
+def detect(
+ byte_str: bytes, should_rename_legacy: bool = False, **kwargs: Any
+) -> ResultDict:
+ """
+ chardet legacy method
+    Detect the encoding of the given byte string. It is mostly backward-compatible
+    with chardet: the encoding name matches chardet's own naming whenever possible
+    (except for encodings chardet does not support).
+    This function is deprecated; use it to ease the migration of an existing project,
+    and consult the documentation for further information. It is not planned for removal.
+
+ :param byte_str: The byte sequence to examine.
+ :param should_rename_legacy: Should we rename legacy encodings
+ to their more modern equivalents?
+ """
+ if len(kwargs):
+ warn(
+ f"charset-normalizer disregard arguments '{','.join(list(kwargs.keys()))}' in legacy function detect()"
+ )
+
+ if not isinstance(byte_str, (bytearray, bytes)):
+ raise TypeError( # pragma: nocover
+ "Expected object of type bytes or bytearray, got: "
+ "{0}".format(type(byte_str))
+ )
+
+ if isinstance(byte_str, bytearray):
+ byte_str = bytes(byte_str)
+
+ r = from_bytes(byte_str).best()
+
+ encoding = r.encoding if r is not None else None
+ language = r.language if r is not None and r.language != "Unknown" else ""
+ confidence = 1.0 - r.chaos if r is not None else None
+
+    # Note: CharsetNormalizer does not return 'UTF-8-SIG' because the sig gets stripped during
+    # the detection/normalization process, but chardet does return 'utf-8-sig' and it is a valid codec name.
+ if r is not None and encoding == "utf_8" and r.bom:
+ encoding += "_sig"
+
+ if should_rename_legacy is False and encoding in CHARDET_CORRESPONDENCE:
+ encoding = CHARDET_CORRESPONDENCE[encoding]
+
+ return {
+ "encoding": encoding,
+ "language": language,
+ "confidence": confidence,
+ }
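+
+
+if __name__ == "__main__":  # pragma: nocover
+    # A minimal usage sketch of the chardet-compatible entry point; run it with
+    # "python -m charset_normalizer.legacy". The sample text is illustrative,
+    # and very short inputs may yield a different guess.
+    print(detect("Comment ça va ?".encode("cp1252")))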
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md.cpython-311-x86_64-linux-gnu.so
new file mode 100755
index 0000000..3824a42
Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md.cpython-311-x86_64-linux-gnu.so differ
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md.py
new file mode 100644
index 0000000..d834db0
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md.py
@@ -0,0 +1,628 @@
+from functools import lru_cache
+from logging import getLogger
+from typing import List, Optional
+
+from .constant import (
+ COMMON_SAFE_ASCII_CHARACTERS,
+ TRACE,
+ UNICODE_SECONDARY_RANGE_KEYWORD,
+)
+from .utils import (
+ is_accentuated,
+ is_arabic,
+ is_arabic_isolated_form,
+ is_case_variable,
+ is_cjk,
+ is_emoticon,
+ is_hangul,
+ is_hiragana,
+ is_katakana,
+ is_latin,
+ is_punctuation,
+ is_separator,
+ is_symbol,
+ is_thai,
+ is_unprintable,
+ remove_accent,
+ unicode_range,
+)
+
+
+class MessDetectorPlugin:
+ """
+ Base abstract class used for mess detection plugins.
+ All detectors MUST extend and implement given methods.
+ """
+
+ def eligible(self, character: str) -> bool:
+ """
+        Determine whether the given character should be fed to this detector.
+ """
+ raise NotImplementedError # pragma: nocover
+
+ def feed(self, character: str) -> None:
+ """
+        The main routine, executed for each eligible character.
+        Implement here the logic by which the text would be considered chaotic.
+ """
+ raise NotImplementedError # pragma: nocover
+
+ def reset(self) -> None: # pragma: no cover
+ """
+        Reset the plugin to its initial state.
+ """
+ raise NotImplementedError
+
+ @property
+ def ratio(self) -> float:
+ """
+        Compute the chaos ratio based on what feed() has seen so far.
+        Must NOT be lower than 0.0; there is no upper bound.
+ """
+ raise NotImplementedError # pragma: nocover
+
+
+class TooManySymbolOrPunctuationPlugin(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._punctuation_count: int = 0
+ self._symbol_count: int = 0
+ self._character_count: int = 0
+
+ self._last_printable_char: Optional[str] = None
+ self._frenzy_symbol_in_word: bool = False
+
+ def eligible(self, character: str) -> bool:
+ return character.isprintable()
+
+ def feed(self, character: str) -> None:
+ self._character_count += 1
+
+ if (
+ character != self._last_printable_char
+ and character not in COMMON_SAFE_ASCII_CHARACTERS
+ ):
+ if is_punctuation(character):
+ self._punctuation_count += 1
+ elif (
+ character.isdigit() is False
+ and is_symbol(character)
+ and is_emoticon(character) is False
+ ):
+ self._symbol_count += 2
+
+ self._last_printable_char = character
+
+ def reset(self) -> None: # pragma: no cover
+ self._punctuation_count = 0
+ self._character_count = 0
+ self._symbol_count = 0
+
+ @property
+ def ratio(self) -> float:
+ if self._character_count == 0:
+ return 0.0
+
+ ratio_of_punctuation: float = (
+ self._punctuation_count + self._symbol_count
+ ) / self._character_count
+
+ return ratio_of_punctuation if ratio_of_punctuation >= 0.3 else 0.0
+
+
+class TooManyAccentuatedPlugin(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._character_count: int = 0
+ self._accentuated_count: int = 0
+
+ def eligible(self, character: str) -> bool:
+ return character.isalpha()
+
+ def feed(self, character: str) -> None:
+ self._character_count += 1
+
+ if is_accentuated(character):
+ self._accentuated_count += 1
+
+ def reset(self) -> None: # pragma: no cover
+ self._character_count = 0
+ self._accentuated_count = 0
+
+ @property
+ def ratio(self) -> float:
+ if self._character_count < 8:
+ return 0.0
+
+ ratio_of_accentuation: float = self._accentuated_count / self._character_count
+ return ratio_of_accentuation if ratio_of_accentuation >= 0.35 else 0.0
+
+
+class UnprintablePlugin(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._unprintable_count: int = 0
+ self._character_count: int = 0
+
+ def eligible(self, character: str) -> bool:
+ return True
+
+ def feed(self, character: str) -> None:
+ if is_unprintable(character):
+ self._unprintable_count += 1
+ self._character_count += 1
+
+ def reset(self) -> None: # pragma: no cover
+ self._unprintable_count = 0
+
+ @property
+ def ratio(self) -> float:
+ if self._character_count == 0:
+ return 0.0
+
+ return (self._unprintable_count * 8) / self._character_count
+
+
+class SuspiciousDuplicateAccentPlugin(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._successive_count: int = 0
+ self._character_count: int = 0
+
+ self._last_latin_character: Optional[str] = None
+
+ def eligible(self, character: str) -> bool:
+ return character.isalpha() and is_latin(character)
+
+ def feed(self, character: str) -> None:
+ self._character_count += 1
+ if (
+ self._last_latin_character is not None
+ and is_accentuated(character)
+ and is_accentuated(self._last_latin_character)
+ ):
+ if character.isupper() and self._last_latin_character.isupper():
+ self._successive_count += 1
+        # Worse if it is the same character duplicated with a different accent.
+ if remove_accent(character) == remove_accent(self._last_latin_character):
+ self._successive_count += 1
+ self._last_latin_character = character
+
+ def reset(self) -> None: # pragma: no cover
+ self._successive_count = 0
+ self._character_count = 0
+ self._last_latin_character = None
+
+ @property
+ def ratio(self) -> float:
+ if self._character_count == 0:
+ return 0.0
+
+ return (self._successive_count * 2) / self._character_count
+
+
+class SuspiciousRange(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._suspicious_successive_range_count: int = 0
+ self._character_count: int = 0
+ self._last_printable_seen: Optional[str] = None
+
+ def eligible(self, character: str) -> bool:
+ return character.isprintable()
+
+ def feed(self, character: str) -> None:
+ self._character_count += 1
+
+ if (
+ character.isspace()
+ or is_punctuation(character)
+ or character in COMMON_SAFE_ASCII_CHARACTERS
+ ):
+ self._last_printable_seen = None
+ return
+
+ if self._last_printable_seen is None:
+ self._last_printable_seen = character
+ return
+
+ unicode_range_a: Optional[str] = unicode_range(self._last_printable_seen)
+ unicode_range_b: Optional[str] = unicode_range(character)
+
+ if is_suspiciously_successive_range(unicode_range_a, unicode_range_b):
+ self._suspicious_successive_range_count += 1
+
+ self._last_printable_seen = character
+
+ def reset(self) -> None: # pragma: no cover
+ self._character_count = 0
+ self._suspicious_successive_range_count = 0
+ self._last_printable_seen = None
+
+ @property
+ def ratio(self) -> float:
+ if self._character_count <= 13:
+ return 0.0
+
+ ratio_of_suspicious_range_usage: float = (
+ self._suspicious_successive_range_count * 2
+ ) / self._character_count
+
+ return ratio_of_suspicious_range_usage
+
+
+class SuperWeirdWordPlugin(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._word_count: int = 0
+ self._bad_word_count: int = 0
+ self._foreign_long_count: int = 0
+
+ self._is_current_word_bad: bool = False
+ self._foreign_long_watch: bool = False
+
+ self._character_count: int = 0
+ self._bad_character_count: int = 0
+
+ self._buffer: str = ""
+ self._buffer_accent_count: int = 0
+ self._buffer_glyph_count: int = 0
+
+ def eligible(self, character: str) -> bool:
+ return True
+
+ def feed(self, character: str) -> None:
+ if character.isalpha():
+ self._buffer += character
+ if is_accentuated(character):
+ self._buffer_accent_count += 1
+ if (
+ self._foreign_long_watch is False
+ and (is_latin(character) is False or is_accentuated(character))
+ and is_cjk(character) is False
+ and is_hangul(character) is False
+ and is_katakana(character) is False
+ and is_hiragana(character) is False
+ and is_thai(character) is False
+ ):
+ self._foreign_long_watch = True
+ if (
+ is_cjk(character)
+ or is_hangul(character)
+ or is_katakana(character)
+ or is_hiragana(character)
+ or is_thai(character)
+ ):
+ self._buffer_glyph_count += 1
+ return
+ if not self._buffer:
+ return
+ if (
+ character.isspace() or is_punctuation(character) or is_separator(character)
+ ) and self._buffer:
+ self._word_count += 1
+ buffer_length: int = len(self._buffer)
+
+ self._character_count += buffer_length
+
+ if buffer_length >= 4:
+ if self._buffer_accent_count / buffer_length >= 0.5:
+ self._is_current_word_bad = True
+                # Words ending with an upper-case accentuated letter are so rare
+                # that we consider them all suspicious, with the same weight as a foreign_long suspicion.
+ elif (
+ is_accentuated(self._buffer[-1])
+ and self._buffer[-1].isupper()
+ and all(_.isupper() for _ in self._buffer) is False
+ ):
+ self._foreign_long_count += 1
+ self._is_current_word_bad = True
+ elif self._buffer_glyph_count == 1:
+ self._is_current_word_bad = True
+ self._foreign_long_count += 1
+ if buffer_length >= 24 and self._foreign_long_watch:
+ camel_case_dst = [
+ i
+ for c, i in zip(self._buffer, range(0, buffer_length))
+ if c.isupper()
+ ]
+ probable_camel_cased: bool = False
+
+ if camel_case_dst and (len(camel_case_dst) / buffer_length <= 0.3):
+ probable_camel_cased = True
+
+ if not probable_camel_cased:
+ self._foreign_long_count += 1
+ self._is_current_word_bad = True
+
+ if self._is_current_word_bad:
+ self._bad_word_count += 1
+ self._bad_character_count += len(self._buffer)
+ self._is_current_word_bad = False
+
+ self._foreign_long_watch = False
+ self._buffer = ""
+ self._buffer_accent_count = 0
+ self._buffer_glyph_count = 0
+ elif (
+ character not in {"<", ">", "-", "=", "~", "|", "_"}
+ and character.isdigit() is False
+ and is_symbol(character)
+ ):
+ self._is_current_word_bad = True
+ self._buffer += character
+
+ def reset(self) -> None: # pragma: no cover
+ self._buffer = ""
+ self._is_current_word_bad = False
+ self._foreign_long_watch = False
+ self._bad_word_count = 0
+ self._word_count = 0
+ self._character_count = 0
+ self._bad_character_count = 0
+ self._foreign_long_count = 0
+
+ @property
+ def ratio(self) -> float:
+ if self._word_count <= 10 and self._foreign_long_count == 0:
+ return 0.0
+
+ return self._bad_character_count / self._character_count
+
+
+class CjkInvalidStopPlugin(MessDetectorPlugin):
+ """
+    GB (Chinese) based encodings often render the full stop incorrectly when the content
+    does not fit, which is easily detected by searching for the overuse of '丅' and '丄'.
+ """
+
+ def __init__(self) -> None:
+ self._wrong_stop_count: int = 0
+ self._cjk_character_count: int = 0
+
+ def eligible(self, character: str) -> bool:
+ return True
+
+ def feed(self, character: str) -> None:
+ if character in {"丅", "丄"}:
+ self._wrong_stop_count += 1
+ return
+ if is_cjk(character):
+ self._cjk_character_count += 1
+
+ def reset(self) -> None: # pragma: no cover
+ self._wrong_stop_count = 0
+ self._cjk_character_count = 0
+
+ @property
+ def ratio(self) -> float:
+ if self._cjk_character_count < 16:
+ return 0.0
+ return self._wrong_stop_count / self._cjk_character_count
+
+
+class ArchaicUpperLowerPlugin(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._buf: bool = False
+
+ self._character_count_since_last_sep: int = 0
+
+ self._successive_upper_lower_count: int = 0
+ self._successive_upper_lower_count_final: int = 0
+
+ self._character_count: int = 0
+
+ self._last_alpha_seen: Optional[str] = None
+ self._current_ascii_only: bool = True
+
+ def eligible(self, character: str) -> bool:
+ return True
+
+ def feed(self, character: str) -> None:
+ is_concerned = character.isalpha() and is_case_variable(character)
+ chunk_sep = is_concerned is False
+
+ if chunk_sep and self._character_count_since_last_sep > 0:
+ if (
+ self._character_count_since_last_sep <= 64
+ and character.isdigit() is False
+ and self._current_ascii_only is False
+ ):
+ self._successive_upper_lower_count_final += (
+ self._successive_upper_lower_count
+ )
+
+ self._successive_upper_lower_count = 0
+ self._character_count_since_last_sep = 0
+ self._last_alpha_seen = None
+ self._buf = False
+ self._character_count += 1
+ self._current_ascii_only = True
+
+ return
+
+ if self._current_ascii_only is True and character.isascii() is False:
+ self._current_ascii_only = False
+
+ if self._last_alpha_seen is not None:
+ if (character.isupper() and self._last_alpha_seen.islower()) or (
+ character.islower() and self._last_alpha_seen.isupper()
+ ):
+ if self._buf is True:
+ self._successive_upper_lower_count += 2
+ self._buf = False
+ else:
+ self._buf = True
+ else:
+ self._buf = False
+
+ self._character_count += 1
+ self._character_count_since_last_sep += 1
+ self._last_alpha_seen = character
+
+ def reset(self) -> None: # pragma: no cover
+ self._character_count = 0
+ self._character_count_since_last_sep = 0
+ self._successive_upper_lower_count = 0
+ self._successive_upper_lower_count_final = 0
+ self._last_alpha_seen = None
+ self._buf = False
+ self._current_ascii_only = True
+
+ @property
+ def ratio(self) -> float:
+ if self._character_count == 0:
+ return 0.0
+
+ return self._successive_upper_lower_count_final / self._character_count
+
+
+class ArabicIsolatedFormPlugin(MessDetectorPlugin):
+ def __init__(self) -> None:
+ self._character_count: int = 0
+ self._isolated_form_count: int = 0
+
+ def reset(self) -> None: # pragma: no cover
+ self._character_count = 0
+ self._isolated_form_count = 0
+
+ def eligible(self, character: str) -> bool:
+ return is_arabic(character)
+
+ def feed(self, character: str) -> None:
+ self._character_count += 1
+
+ if is_arabic_isolated_form(character):
+ self._isolated_form_count += 1
+
+ @property
+ def ratio(self) -> float:
+ if self._character_count < 8:
+ return 0.0
+
+ isolated_form_usage: float = self._isolated_form_count / self._character_count
+
+ return isolated_form_usage
+
+
+@lru_cache(maxsize=1024)
+def is_suspiciously_successive_range(
+ unicode_range_a: Optional[str], unicode_range_b: Optional[str]
+) -> bool:
+ """
+    Determine whether two Unicode ranges seen next to each other can be considered suspicious.
+ """
+ if unicode_range_a is None or unicode_range_b is None:
+ return True
+
+ if unicode_range_a == unicode_range_b:
+ return False
+
+ if "Latin" in unicode_range_a and "Latin" in unicode_range_b:
+ return False
+
+ if "Emoticons" in unicode_range_a or "Emoticons" in unicode_range_b:
+ return False
+
+ # Latin characters can be accompanied with a combining diacritical mark
+ # eg. Vietnamese.
+ if ("Latin" in unicode_range_a or "Latin" in unicode_range_b) and (
+ "Combining" in unicode_range_a or "Combining" in unicode_range_b
+ ):
+ return False
+
+ keywords_range_a, keywords_range_b = unicode_range_a.split(
+ " "
+ ), unicode_range_b.split(" ")
+
+ for el in keywords_range_a:
+ if el in UNICODE_SECONDARY_RANGE_KEYWORD:
+ continue
+ if el in keywords_range_b:
+ return False
+
+ # Japanese Exception
+ range_a_jp_chars, range_b_jp_chars = (
+ unicode_range_a
+ in (
+ "Hiragana",
+ "Katakana",
+ ),
+ unicode_range_b in ("Hiragana", "Katakana"),
+ )
+ if (range_a_jp_chars or range_b_jp_chars) and (
+ "CJK" in unicode_range_a or "CJK" in unicode_range_b
+ ):
+ return False
+ if range_a_jp_chars and range_b_jp_chars:
+ return False
+
+ if "Hangul" in unicode_range_a or "Hangul" in unicode_range_b:
+ if "CJK" in unicode_range_a or "CJK" in unicode_range_b:
+ return False
+ if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin":
+ return False
+
+ # Chinese/Japanese use dedicated range for punctuation and/or separators.
+ if ("CJK" in unicode_range_a or "CJK" in unicode_range_b) or (
+ unicode_range_a in ["Katakana", "Hiragana"]
+ and unicode_range_b in ["Katakana", "Hiragana"]
+ ):
+ if "Punctuation" in unicode_range_a or "Punctuation" in unicode_range_b:
+ return False
+ if "Forms" in unicode_range_a or "Forms" in unicode_range_b:
+ return False
+ if unicode_range_a == "Basic Latin" or unicode_range_b == "Basic Latin":
+ return False
+
+ return True
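+
+# For instance (a sketch): a Cyrillic letter directly followed by a Basic Latin
+# letter is flagged as suspicious, while Hiragana next to Katakana is an
+# ordinary Japanese pattern:
+#   is_suspiciously_successive_range("Cyrillic", "Basic Latin")  # -> True
+#   is_suspiciously_successive_range("Hiragana", "Katakana")     # -> False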
+
+
+@lru_cache(maxsize=2048)
+def mess_ratio(
+ decoded_sequence: str, maximum_threshold: float = 0.2, debug: bool = False
+) -> float:
+ """
+    Compute a mess ratio for a decoded byte sequence. Reaching the maximum threshold stops the computation early.
+ """
+
+ detectors: List[MessDetectorPlugin] = [
+ md_class() for md_class in MessDetectorPlugin.__subclasses__()
+ ]
+
+ length: int = len(decoded_sequence) + 1
+
+ mean_mess_ratio: float = 0.0
+
+ if length < 512:
+ intermediary_mean_mess_ratio_calc: int = 32
+ elif length <= 1024:
+ intermediary_mean_mess_ratio_calc = 64
+ else:
+ intermediary_mean_mess_ratio_calc = 128
+
+ for character, index in zip(decoded_sequence + "\n", range(length)):
+ for detector in detectors:
+ if detector.eligible(character):
+ detector.feed(character)
+
+ if (
+ index > 0 and index % intermediary_mean_mess_ratio_calc == 0
+ ) or index == length - 1:
+ mean_mess_ratio = sum(dt.ratio for dt in detectors)
+
+ if mean_mess_ratio >= maximum_threshold:
+ break
+
+ if debug:
+ logger = getLogger("charset_normalizer")
+
+ logger.log(
+ TRACE,
+ "Mess-detector extended-analysis start. "
+ f"intermediary_mean_mess_ratio_calc={intermediary_mean_mess_ratio_calc} mean_mess_ratio={mean_mess_ratio} "
+ f"maximum_threshold={maximum_threshold}",
+ )
+
+ if len(decoded_sequence) > 16:
+ logger.log(TRACE, f"Starting with: {decoded_sequence[:16]}")
+ logger.log(TRACE, f"Ending with: {decoded_sequence[-16::]}")
+
+ for dt in detectors: # pragma: nocover
+ logger.log(TRACE, f"{dt.__class__}: {dt.ratio}")
+
+ return round(mean_mess_ratio, 3)
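+
+
+if __name__ == "__main__":  # pragma: nocover
+    # A minimal sketch (the sample strings are illustrative): clean text scores
+    # near 0.0, while mojibake scores noticeably higher.
+    print(mess_ratio("I can't wait to finish this book!"))
+    print(mess_ratio("ÃÂ© garbled Ã¢â‚¬â€œ text"))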
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so
new file mode 100755
index 0000000..38d5e70
Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/md__mypyc.cpython-311-x86_64-linux-gnu.so differ
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/models.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/models.py
new file mode 100644
index 0000000..6f6b86b
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/models.py
@@ -0,0 +1,359 @@
+from encodings.aliases import aliases
+from hashlib import sha256
+from json import dumps
+from re import sub
+from typing import Any, Dict, Iterator, List, Optional, Tuple, Union
+
+from .constant import RE_POSSIBLE_ENCODING_INDICATION, TOO_BIG_SEQUENCE
+from .utils import iana_name, is_multi_byte_encoding, unicode_range
+
+
+class CharsetMatch:
+ def __init__(
+ self,
+ payload: bytes,
+ guessed_encoding: str,
+ mean_mess_ratio: float,
+ has_sig_or_bom: bool,
+ languages: "CoherenceMatches",
+ decoded_payload: Optional[str] = None,
+ preemptive_declaration: Optional[str] = None,
+ ):
+ self._payload: bytes = payload
+
+ self._encoding: str = guessed_encoding
+ self._mean_mess_ratio: float = mean_mess_ratio
+ self._languages: CoherenceMatches = languages
+ self._has_sig_or_bom: bool = has_sig_or_bom
+ self._unicode_ranges: Optional[List[str]] = None
+
+ self._leaves: List[CharsetMatch] = []
+ self._mean_coherence_ratio: float = 0.0
+
+ self._output_payload: Optional[bytes] = None
+ self._output_encoding: Optional[str] = None
+
+ self._string: Optional[str] = decoded_payload
+
+ self._preemptive_declaration: Optional[str] = preemptive_declaration
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, CharsetMatch):
+ if isinstance(other, str):
+ return iana_name(other) == self.encoding
+ return False
+ return self.encoding == other.encoding and self.fingerprint == other.fingerprint
+
+ def __lt__(self, other: object) -> bool:
+ """
+        Implemented so that sorted() works on CharsetMatch items.
+ """
+ if not isinstance(other, CharsetMatch):
+ raise ValueError
+
+ chaos_difference: float = abs(self.chaos - other.chaos)
+ coherence_difference: float = abs(self.coherence - other.coherence)
+
+ # Below 1% difference --> Use Coherence
+ if chaos_difference < 0.01 and coherence_difference > 0.02:
+ return self.coherence > other.coherence
+ elif chaos_difference < 0.01 and coherence_difference <= 0.02:
+            # When the decision is difficult, prefer the result that decoded as many
+            # multi-byte characters as possible; for very big payloads, fall back to
+            # chaos to preserve RAM.
+ if len(self._payload) >= TOO_BIG_SEQUENCE:
+ return self.chaos < other.chaos
+ return self.multi_byte_usage > other.multi_byte_usage
+
+ return self.chaos < other.chaos
+
+ @property
+ def multi_byte_usage(self) -> float:
+ return 1.0 - (len(str(self)) / len(self.raw))
+
+ def __str__(self) -> str:
+ # Lazy Str Loading
+ if self._string is None:
+ self._string = str(self._payload, self._encoding, "strict")
+ return self._string
+
+ def __repr__(self) -> str:
+ return "".format(self.encoding, self.fingerprint)
+
+ def add_submatch(self, other: "CharsetMatch") -> None:
+ if not isinstance(other, CharsetMatch) or other == self:
+ raise ValueError(
+ "Unable to add instance <{}> as a submatch of a CharsetMatch".format(
+ other.__class__
+ )
+ )
+
+        other._string = None  # Free the cached string to reduce RAM usage; admittedly a dirty trick.
+ self._leaves.append(other)
+
+ @property
+ def encoding(self) -> str:
+ return self._encoding
+
+ @property
+ def encoding_aliases(self) -> List[str]:
+ """
+        An encoding is known by many names; this helps e.g. when searching for IBM855 while it is listed as CP855.
+ """
+ also_known_as: List[str] = []
+ for u, p in aliases.items():
+ if self.encoding == u:
+ also_known_as.append(p)
+ elif self.encoding == p:
+ also_known_as.append(u)
+ return also_known_as
+
+ @property
+ def bom(self) -> bool:
+ return self._has_sig_or_bom
+
+ @property
+ def byte_order_mark(self) -> bool:
+ return self._has_sig_or_bom
+
+ @property
+ def languages(self) -> List[str]:
+ """
+        Return the complete list of possible languages found in the decoded sequence.
+        Usually not very useful. The returned list may be empty even if the 'language' property returns something other than 'Unknown'.
+ """
+ return [e[0] for e in self._languages]
+
+ @property
+ def language(self) -> str:
+ """
+ Most probable language found in decoded sequence. If none were detected or inferred, the property will return
+ "Unknown".
+ """
+ if not self._languages:
+            # Try to infer the language based on the given encoding.
+            # It is either English, or we should not commit ourselves in certain cases.
+ if "ascii" in self.could_be_from_charset:
+ return "English"
+
+            # imported here to avoid a circular import
+ from charset_normalizer.cd import encoding_languages, mb_encoding_languages
+
+ languages = (
+ mb_encoding_languages(self.encoding)
+ if is_multi_byte_encoding(self.encoding)
+ else encoding_languages(self.encoding)
+ )
+
+ if len(languages) == 0 or "Latin Based" in languages:
+ return "Unknown"
+
+ return languages[0]
+
+ return self._languages[0][0]
+
+ @property
+ def chaos(self) -> float:
+ return self._mean_mess_ratio
+
+ @property
+ def coherence(self) -> float:
+ if not self._languages:
+ return 0.0
+ return self._languages[0][1]
+
+ @property
+ def percent_chaos(self) -> float:
+ return round(self.chaos * 100, ndigits=3)
+
+ @property
+ def percent_coherence(self) -> float:
+ return round(self.coherence * 100, ndigits=3)
+
+ @property
+ def raw(self) -> bytes:
+ """
+ Original untouched bytes.
+ """
+ return self._payload
+
+ @property
+ def submatch(self) -> List["CharsetMatch"]:
+ return self._leaves
+
+ @property
+ def has_submatch(self) -> bool:
+ return len(self._leaves) > 0
+
+ @property
+ def alphabets(self) -> List[str]:
+ if self._unicode_ranges is not None:
+ return self._unicode_ranges
+ # list detected ranges
+ detected_ranges: List[Optional[str]] = [
+ unicode_range(char) for char in str(self)
+ ]
+ # filter and sort
+ self._unicode_ranges = sorted(list({r for r in detected_ranges if r}))
+ return self._unicode_ranges
+
+ @property
+ def could_be_from_charset(self) -> List[str]:
+ """
+        The complete list of encodings that produce the exact same str result and therefore
+        could be the originating encoding.
+        This list includes the encoding available in the 'encoding' property.
+ """
+ return [self._encoding] + [m.encoding for m in self._leaves]
+
+ def output(self, encoding: str = "utf_8") -> bytes:
+ """
+        Get the re-encoded bytes payload using the given target encoding (UTF-8 by default).
+        Characters that cannot be encoded are replaced by the encoder rather than raised as errors.
+ """
+ if self._output_encoding is None or self._output_encoding != encoding:
+ self._output_encoding = encoding
+ decoded_string = str(self)
+ if (
+ self._preemptive_declaration is not None
+ and self._preemptive_declaration.lower()
+ not in ["utf-8", "utf8", "utf_8"]
+ ):
+ patched_header = sub(
+ RE_POSSIBLE_ENCODING_INDICATION,
+ lambda m: m.string[m.span()[0] : m.span()[1]].replace(
+ m.groups()[0], iana_name(self._output_encoding) # type: ignore[arg-type]
+ ),
+ decoded_string[:8192],
+ 1,
+ )
+
+ decoded_string = patched_header + decoded_string[8192:]
+
+ self._output_payload = decoded_string.encode(encoding, "replace")
+
+ return self._output_payload # type: ignore
+
+ @property
+ def fingerprint(self) -> str:
+ """
+        Retrieve the SHA-256 fingerprint computed from the transformed (re-encoded) payload, not the original one.
+ """
+ return sha256(self.output()).hexdigest()
+
+
+class CharsetMatches:
+ """
+    Container of CharsetMatch items, ordered by default from the most probable to the least.
+    Acts like a list (iterable) but does not implement all related methods.
+ """
+
+ def __init__(self, results: Optional[List[CharsetMatch]] = None):
+ self._results: List[CharsetMatch] = sorted(results) if results else []
+
+ def __iter__(self) -> Iterator[CharsetMatch]:
+ yield from self._results
+
+ def __getitem__(self, item: Union[int, str]) -> CharsetMatch:
+ """
+        Retrieve a single item either by its position or encoding name (an alias may be used here).
+        Raises KeyError upon an invalid index or an encoding not present in the results.
+ """
+ if isinstance(item, int):
+ return self._results[item]
+ if isinstance(item, str):
+ item = iana_name(item, False)
+ for result in self._results:
+ if item in result.could_be_from_charset:
+ return result
+ raise KeyError
+
+ def __len__(self) -> int:
+ return len(self._results)
+
+ def __bool__(self) -> bool:
+ return len(self._results) > 0
+
+ def append(self, item: CharsetMatch) -> None:
+ """
+        Insert a single match. It is inserted so as to preserve the sort order,
+        and may be attached as a submatch of an equivalent result.
+ """
+ if not isinstance(item, CharsetMatch):
+ raise ValueError(
+ "Cannot append instance '{}' to CharsetMatches".format(
+ str(item.__class__)
+ )
+ )
+        # Disable the submatch factoring when the input file is too heavy (to conserve RAM)
+ if len(item.raw) < TOO_BIG_SEQUENCE:
+ for match in self._results:
+ if match.fingerprint == item.fingerprint and match.chaos == item.chaos:
+ match.add_submatch(item)
+ return
+ self._results.append(item)
+ self._results = sorted(self._results)
+
+ def best(self) -> Optional["CharsetMatch"]:
+ """
+ Simply return the first match. Strict equivalent to matches[0].
+ """
+ if not self._results:
+ return None
+ return self._results[0]
+
+ def first(self) -> Optional["CharsetMatch"]:
+ """
+        Redundant method that simply calls best(). Kept for backward-compatibility reasons.
+ """
+ return self.best()
+
+
+CoherenceMatch = Tuple[str, float]
+CoherenceMatches = List[CoherenceMatch]
+
+
+class CliDetectionResult:
+ def __init__(
+ self,
+ path: str,
+ encoding: Optional[str],
+ encoding_aliases: List[str],
+ alternative_encodings: List[str],
+ language: str,
+ alphabets: List[str],
+ has_sig_or_bom: bool,
+ chaos: float,
+ coherence: float,
+ unicode_path: Optional[str],
+ is_preferred: bool,
+ ):
+ self.path: str = path
+ self.unicode_path: Optional[str] = unicode_path
+ self.encoding: Optional[str] = encoding
+ self.encoding_aliases: List[str] = encoding_aliases
+ self.alternative_encodings: List[str] = alternative_encodings
+ self.language: str = language
+ self.alphabets: List[str] = alphabets
+ self.has_sig_or_bom: bool = has_sig_or_bom
+ self.chaos: float = chaos
+ self.coherence: float = coherence
+ self.is_preferred: bool = is_preferred
+
+ @property
+ def __dict__(self) -> Dict[str, Any]: # type: ignore
+ return {
+ "path": self.path,
+ "encoding": self.encoding,
+ "encoding_aliases": self.encoding_aliases,
+ "alternative_encodings": self.alternative_encodings,
+ "language": self.language,
+ "alphabets": self.alphabets,
+ "has_sig_or_bom": self.has_sig_or_bom,
+ "chaos": self.chaos,
+ "coherence": self.coherence,
+ "unicode_path": self.unicode_path,
+ "is_preferred": self.is_preferred,
+ }
+
+ def to_json(self) -> str:
+ return dumps(self.__dict__, ensure_ascii=True, indent=4)
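+
+
+if __name__ == "__main__":  # pragma: nocover
+    # A minimal sketch of how these models are consumed; run it with
+    # "python -m charset_normalizer.models". The import is kept local to avoid
+    # a circular import at module load time, and the sample text is illustrative.
+    from charset_normalizer import from_bytes
+
+    matches = from_bytes("Comment ça va ?".encode("cp1252"))
+    best_guess = matches.best()
+    if best_guess is not None:
+        print(best_guess.encoding, best_guess.language, best_guess.percent_chaos)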
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/py.typed b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/utils.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/utils.py
new file mode 100644
index 0000000..e5cbbf4
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/utils.py
@@ -0,0 +1,421 @@
+import importlib
+import logging
+import unicodedata
+from codecs import IncrementalDecoder
+from encodings.aliases import aliases
+from functools import lru_cache
+from re import findall
+from typing import Generator, List, Optional, Set, Tuple, Union
+
+from _multibytecodec import MultibyteIncrementalDecoder
+
+from .constant import (
+ ENCODING_MARKS,
+ IANA_SUPPORTED_SIMILAR,
+ RE_POSSIBLE_ENCODING_INDICATION,
+ UNICODE_RANGES_COMBINED,
+ UNICODE_SECONDARY_RANGE_KEYWORD,
+ UTF8_MAXIMAL_ALLOCATION,
+)
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_accentuated(character: str) -> bool:
+ try:
+ description: str = unicodedata.name(character)
+ except ValueError:
+ return False
+ return (
+ "WITH GRAVE" in description
+ or "WITH ACUTE" in description
+ or "WITH CEDILLA" in description
+ or "WITH DIAERESIS" in description
+ or "WITH CIRCUMFLEX" in description
+ or "WITH TILDE" in description
+ or "WITH MACRON" in description
+ or "WITH RING ABOVE" in description
+ )
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def remove_accent(character: str) -> str:
+ decomposed: str = unicodedata.decomposition(character)
+ if not decomposed:
+ return character
+
+ codes: List[str] = decomposed.split(" ")
+
+ return chr(int(codes[0], 16))
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def unicode_range(character: str) -> Optional[str]:
+ """
+ Retrieve the Unicode range official name from a single character.
+ """
+ character_ord: int = ord(character)
+
+ for range_name, ord_range in UNICODE_RANGES_COMBINED.items():
+ if character_ord in ord_range:
+ return range_name
+
+ return None
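+
+# For instance (a sketch): unicode_range("a") -> "Basic Latin" and
+# unicode_range("あ") -> "Hiragana"; code points outside every known range yield None.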
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_latin(character: str) -> bool:
+ try:
+ description: str = unicodedata.name(character)
+ except ValueError:
+ return False
+ return "LATIN" in description
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_punctuation(character: str) -> bool:
+ character_category: str = unicodedata.category(character)
+
+ if "P" in character_category:
+ return True
+
+ character_range: Optional[str] = unicode_range(character)
+
+ if character_range is None:
+ return False
+
+ return "Punctuation" in character_range
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_symbol(character: str) -> bool:
+ character_category: str = unicodedata.category(character)
+
+ if "S" in character_category or "N" in character_category:
+ return True
+
+ character_range: Optional[str] = unicode_range(character)
+
+ if character_range is None:
+ return False
+
+ return "Forms" in character_range and character_category != "Lo"
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_emoticon(character: str) -> bool:
+ character_range: Optional[str] = unicode_range(character)
+
+ if character_range is None:
+ return False
+
+ return "Emoticons" in character_range or "Pictographs" in character_range
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_separator(character: str) -> bool:
+ if character.isspace() or character in {"|", "+", "<", ">"}:
+ return True
+
+ character_category: str = unicodedata.category(character)
+
+ return "Z" in character_category or character_category in {"Po", "Pd", "Pc"}
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_case_variable(character: str) -> bool:
+ return character.islower() != character.isupper()
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_cjk(character: str) -> bool:
+ try:
+ character_name = unicodedata.name(character)
+ except ValueError:
+ return False
+
+ return "CJK" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_hiragana(character: str) -> bool:
+ try:
+ character_name = unicodedata.name(character)
+ except ValueError:
+ return False
+
+ return "HIRAGANA" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_katakana(character: str) -> bool:
+ try:
+ character_name = unicodedata.name(character)
+ except ValueError:
+ return False
+
+ return "KATAKANA" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_hangul(character: str) -> bool:
+ try:
+ character_name = unicodedata.name(character)
+ except ValueError:
+ return False
+
+ return "HANGUL" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_thai(character: str) -> bool:
+ try:
+ character_name = unicodedata.name(character)
+ except ValueError:
+ return False
+
+ return "THAI" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_arabic(character: str) -> bool:
+ try:
+ character_name = unicodedata.name(character)
+ except ValueError:
+ return False
+
+ return "ARABIC" in character_name
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_arabic_isolated_form(character: str) -> bool:
+ try:
+ character_name = unicodedata.name(character)
+ except ValueError:
+ return False
+
+ return "ARABIC" in character_name and "ISOLATED FORM" in character_name
+
+
+@lru_cache(maxsize=len(UNICODE_RANGES_COMBINED))
+def is_unicode_range_secondary(range_name: str) -> bool:
+ return any(keyword in range_name for keyword in UNICODE_SECONDARY_RANGE_KEYWORD)
+
+
+@lru_cache(maxsize=UTF8_MAXIMAL_ALLOCATION)
+def is_unprintable(character: str) -> bool:
+ return (
+ character.isspace() is False # includes \n \t \r \v
+ and character.isprintable() is False
+ and character != "\x1A" # Why? Its the ASCII substitute character.
+ and character != "\ufeff" # bug discovered in Python,
+ # Zero Width No-Break Space located in Arabic Presentation Forms-B, Unicode 1.1 not acknowledged as space.
+ )
+
+
+def any_specified_encoding(sequence: bytes, search_zone: int = 8192) -> Optional[str]:
+ """
+    Extract, using an ASCII-only decoder, any declared encoding within the first n bytes.
+ """
+ if not isinstance(sequence, bytes):
+ raise TypeError
+
+ seq_len: int = len(sequence)
+
+ results: List[str] = findall(
+ RE_POSSIBLE_ENCODING_INDICATION,
+ sequence[: min(seq_len, search_zone)].decode("ascii", errors="ignore"),
+ )
+
+ if len(results) == 0:
+ return None
+
+ for specified_encoding in results:
+ specified_encoding = specified_encoding.lower().replace("-", "_")
+
+ encoding_alias: str
+ encoding_iana: str
+
+ for encoding_alias, encoding_iana in aliases.items():
+ if encoding_alias == specified_encoding:
+ return encoding_iana
+ if encoding_iana == specified_encoding:
+ return encoding_iana
+
+ return None
+
+
+@lru_cache(maxsize=128)
+def is_multi_byte_encoding(name: str) -> bool:
+ """
+    Verify whether a specific encoding is a multi-byte one, based on its IANA name.
+ """
+ return name in {
+ "utf_8",
+ "utf_8_sig",
+ "utf_16",
+ "utf_16_be",
+ "utf_16_le",
+ "utf_32",
+ "utf_32_le",
+ "utf_32_be",
+ "utf_7",
+ } or issubclass(
+ importlib.import_module("encodings.{}".format(name)).IncrementalDecoder,
+ MultibyteIncrementalDecoder,
+ )
+
+
+def identify_sig_or_bom(sequence: bytes) -> Tuple[Optional[str], bytes]:
+ """
+ Identify and extract SIG/BOM in given sequence.
+ """
+
+ for iana_encoding in ENCODING_MARKS:
+ marks: Union[bytes, List[bytes]] = ENCODING_MARKS[iana_encoding]
+
+ if isinstance(marks, bytes):
+ marks = [marks]
+
+ for mark in marks:
+ if sequence.startswith(mark):
+ return iana_encoding, mark
+
+ return None, b""
+
+
+def should_strip_sig_or_bom(iana_encoding: str) -> bool:
+ return iana_encoding not in {"utf_16", "utf_32"}
+
+
+def iana_name(cp_name: str, strict: bool = True) -> str:
+ cp_name = cp_name.lower().replace("-", "_")
+
+ encoding_alias: str
+ encoding_iana: str
+
+ for encoding_alias, encoding_iana in aliases.items():
+ if cp_name in [encoding_alias, encoding_iana]:
+ return encoding_iana
+
+ if strict:
+ raise ValueError("Unable to retrieve IANA for '{}'".format(cp_name))
+
+ return cp_name
+
+
+def range_scan(decoded_sequence: str) -> List[str]:
+ ranges: Set[str] = set()
+
+ for character in decoded_sequence:
+ character_range: Optional[str] = unicode_range(character)
+
+ if character_range is None:
+ continue
+
+ ranges.add(character_range)
+
+ return list(ranges)
+
+
+def cp_similarity(iana_name_a: str, iana_name_b: str) -> float:
+ if is_multi_byte_encoding(iana_name_a) or is_multi_byte_encoding(iana_name_b):
+ return 0.0
+
+ decoder_a = importlib.import_module(
+ "encodings.{}".format(iana_name_a)
+ ).IncrementalDecoder
+ decoder_b = importlib.import_module(
+ "encodings.{}".format(iana_name_b)
+ ).IncrementalDecoder
+
+ id_a: IncrementalDecoder = decoder_a(errors="ignore")
+ id_b: IncrementalDecoder = decoder_b(errors="ignore")
+
+ character_match_count: int = 0
+
+ for i in range(255):
+ to_be_decoded: bytes = bytes([i])
+ if id_a.decode(to_be_decoded) == id_b.decode(to_be_decoded):
+ character_match_count += 1
+
+ return character_match_count / 254
+
+
+def is_cp_similar(iana_name_a: str, iana_name_b: str) -> bool:
+ """
+    Determine whether two code pages are at least 80% similar. The IANA_SUPPORTED_SIMILAR
+    dict was generated using the cp_similarity function.
+ """
+ return (
+ iana_name_a in IANA_SUPPORTED_SIMILAR
+ and iana_name_b in IANA_SUPPORTED_SIMILAR[iana_name_a]
+ )
+
+
+def set_logging_handler(
+ name: str = "charset_normalizer",
+ level: int = logging.INFO,
+ format_string: str = "%(asctime)s | %(levelname)s | %(message)s",
+) -> None:
+ logger = logging.getLogger(name)
+ logger.setLevel(level)
+
+ handler = logging.StreamHandler()
+ handler.setFormatter(logging.Formatter(format_string))
+ logger.addHandler(handler)
+
+
+def cut_sequence_chunks(
+ sequences: bytes,
+ encoding_iana: str,
+ offsets: range,
+ chunk_size: int,
+ bom_or_sig_available: bool,
+ strip_sig_or_bom: bool,
+ sig_payload: bytes,
+ is_multi_byte_decoder: bool,
+ decoded_payload: Optional[str] = None,
+) -> Generator[str, None, None]:
+ if decoded_payload and is_multi_byte_decoder is False:
+ for i in offsets:
+ chunk = decoded_payload[i : i + chunk_size]
+ if not chunk:
+ break
+ yield chunk
+ else:
+ for i in offsets:
+ chunk_end = i + chunk_size
+ if chunk_end > len(sequences) + 8:
+ continue
+
+ cut_sequence = sequences[i : i + chunk_size]
+
+ if bom_or_sig_available and strip_sig_or_bom is False:
+ cut_sequence = sig_payload + cut_sequence
+
+ chunk = cut_sequence.decode(
+ encoding_iana,
+ errors="ignore" if is_multi_byte_decoder else "strict",
+ )
+
+            # Multi-byte bad-cut detector and adjustment:
+            # not the cleanest way to perform the fix, but clever enough for now.
+ if is_multi_byte_decoder and i > 0:
+ chunk_partial_size_chk: int = min(chunk_size, 16)
+
+ if (
+ decoded_payload
+ and chunk[:chunk_partial_size_chk] not in decoded_payload
+ ):
+ for j in range(i, i - 4, -1):
+ cut_sequence = sequences[j:chunk_end]
+
+ if bom_or_sig_available and strip_sig_or_bom is False:
+ cut_sequence = sig_payload + cut_sequence
+
+ chunk = cut_sequence.decode(encoding_iana, errors="ignore")
+
+ if chunk[:chunk_partial_size_chk] in decoded_payload:
+ break
+
+ yield chunk
diff --git a/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/version.py b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/version.py
new file mode 100644
index 0000000..699990e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/charset_normalizer/version.py
@@ -0,0 +1,6 @@
+"""
+Expose version
+"""
+
+__version__ = "3.4.0"
+VERSION = __version__.split(".")
diff --git a/lambdas/aws-dd-forwarder-3.127.0/customized_log_group.py b/lambdas/aws-dd-forwarder-3.127.0/customized_log_group.py
new file mode 100644
index 0000000..ad63a95
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/customized_log_group.py
@@ -0,0 +1,38 @@
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License Version 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2021 Datadog, Inc.
+
+
+import re
+
+"""
+A customized log group is a log group shared by multiple applications of the same type.
+Based on feedback from AWS, customers may name log groups arbitrarily: e.g. a Lambda log
+group may be named "/aws/vendedlogs/states/**", which is typically used for Step Functions.
+Potentially, any other AWS service (not just Lambda) can use a customized log group.
+The workaround is to parse the logstream_name to determine the source of the logs.
+"""
+
+# Example: "2023/11/06/test-customized-log-group1[$LATEST]13e304cba4b9446eb7ef082a00038990"
+REX_LAMBDA_CUSTOMIZE_LOGSTREAM_NAME_PATTERN = re.compile(
+ "^[0-9]{4}\\/[01][0-9]\\/[0-3][0-9]\\/[0-9a-zA-Z_.-]{1,75}\\[(?:\\$LATEST|[0-9A-Za-z_-]{1,129})\\][0-9a-f]{32}$"
+)
+
+
+def is_lambda_customized_log_group(logstream_name):
+ return (
+ REX_LAMBDA_CUSTOMIZE_LOGSTREAM_NAME_PATTERN.fullmatch(logstream_name)
+ is not None
+ )
+
+
+def get_lambda_function_name_from_logstream_name(logstream_name):
+ try:
+        # Does not match the pattern for a customized Lambda log group
+ if not is_lambda_customized_log_group(logstream_name):
+ return None
+ leftSquareBracketPos = logstream_name.index("[")
+ lastForwardSlashPos = logstream_name.rindex("/")
+ return logstream_name[lastForwardSlashPos + 1 : leftSquareBracketPos]
+    except Exception:  # defensive: malformed input (e.g. a non-string) yields None
+ return None
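+
+
+if __name__ == "__main__":  # pragma: nocover
+    # A minimal sketch using the log stream name from the example above.
+    print(
+        get_lambda_function_name_from_logstream_name(
+            "2023/11/06/test-customized-log-group1[$LATEST]13e304cba4b9446eb7ef082a00038990"
+        )
+    )  # -> test-customized-log-group1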
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/METADATA
new file mode 100644
index 0000000..91be477
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/METADATA
@@ -0,0 +1,228 @@
+Metadata-Version: 2.1
+Name: datadog
+Version: 0.48.0
+Summary: The Datadog Python library
+Project-URL: Bug Tracker, https://github.com/DataDog/datadogpy/issues
+Project-URL: Documentation, https://datadogpy.readthedocs.io/en/latest/
+Project-URL: Source Code, https://github.com/DataDog/datadogpy
+Author-email: "Datadog, Inc."
+License: BSD-3-Clause
+License-File: LICENSE
+License-File: LICENSE-3rdparty.csv
+Keywords: datadog
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7
+Requires-Dist: configparser<5; python_version < '3.0'
+Requires-Dist: requests>=2.6.0
+Requires-Dist: typing; python_version < '3.5'
+Description-Content-Type: text/markdown
+
+# The Datadog Python library
+
+[![Unit Tests](https://dev.azure.com/datadoghq/datadogpy/_apis/build/status/DataDog.datadogpy.unit?branchName=master)](https://dev.azure.com/datadoghq/datadogpy/_build/latest?definitionId=10&branchName=master)
+[![Integration Tests](https://dev.azure.com/datadoghq/datadogpy/_apis/build/status/DataDog.datadogpy.integration?branchName=master)](https://dev.azure.com/datadoghq/datadogpy/_build/latest?definitionId=13&branchName=master)
+[![Documentation Status](https://readthedocs.org/projects/datadogpy/badge/?version=latest)](https://readthedocs.org/projects/datadogpy/?badge=latest)
+[![PyPI - Version](https://img.shields.io/pypi/v/datadog.svg)](https://pypi.org/project/datadog)
+[![PyPI - Downloads](https://pepy.tech/badge/datadog)](https://pepy.tech/project/datadog)
+
+The Datadog Python Library is a collection of tools suitable for inclusion in existing Python projects or for the development of standalone scripts. It provides an abstraction on top of Datadog's raw HTTP interface and the Agent's DogStatsD metrics aggregation server, to interact with Datadog and efficiently report events and metrics.
+
+- Library Documentation: https://datadogpy.readthedocs.io/en/latest/
+- HTTP API Documentation: https://docs.datadoghq.com/api/
+- DatadogHQ: https://datadoghq.com
+
+See [CHANGELOG.md](https://github.com/DataDog/datadogpy/blob/master/CHANGELOG.md) for changes.
+
+## Installation
+
+To install from pip:
+
+ pip install datadog
+
+To install from source:
+
+ python setup.py install
+
+## Datadog API
+
+To support all Datadog HTTP APIs, a generated library is
+available that exposes all of the endpoints:
+[datadog-api-client-python](https://github.com/DataDog/datadog-api-client-python).
+
+Find below a working example for submitting an event to your Event Stream:
+
+```python
+from datadog import initialize, api
+
+options = {
+ "api_key": "",
+ "app_key": "",
+}
+
+initialize(**options)
+
+title = "Something big happened!"
+text = "And let me tell you all about it here!"
+tags = ["version:1", "application:web"]
+
+api.Event.create(title=title, text=text, tags=tags)
+```
+
+**Consult the full list of supported Datadog API endpoints with working code examples in [the Datadog API documentation](https://docs.datadoghq.com/api/latest/?code-lang=python).**
+
+**Note**: The full list of available Datadog API endpoints is also available in the [Datadog Python Library documentation](https://datadogpy.readthedocs.io/en/latest/)
+
+#### Environment Variables
+
+As an alternate method to using the `initialize` function with the `options` parameters, set the environment variables `DATADOG_API_KEY` and `DATADOG_APP_KEY` within the context of your application.
+
+If `DATADOG_API_KEY` or `DATADOG_APP_KEY` are not set, the library attempts to fall back to Datadog's APM environment variable prefixes: `DD_API_KEY` and `DD_APP_KEY`.
+
+```python
+from datadog import initialize, api
+
+# Assuming you've set `DD_API_KEY` and `DD_APP_KEY` in your env,
+# initialize() will pick it up automatically
+initialize()
+
+title = "Something big happened!"
+text = "And let me tell you all about it here!"
+tags = ["version:1", "application:web"]
+
+api.Event.create(title=title, text=text, tags=tags)
+```
+
+In development, you can disable any `statsd` metric collection using `DD_DOGSTATSD_DISABLE=True` (or any non-empty value).
+
+## DogStatsD
+
+In order to use DogStatsD metrics, the Agent must be [running and available](https://docs.datadoghq.com/developers/dogstatsd/?code-lang=python).
+
+### Instantiate the DogStatsD client with UDP
+
+Once the Datadog Python Library is installed, instantiate the StatsD client using UDP in your code:
+
+```python
+from datadog import initialize, statsd
+
+options = {
+ "statsd_host": "127.0.0.1",
+ "statsd_port": 8125,
+}
+
+initialize(**options)
+```
+
+See the full list of available [DogStatsD client instantiation parameters](https://docs.datadoghq.com/developers/dogstatsd/?code-lang=python#client-instantiation-parameters).
+
+### Instantiate the DogStatsD client with UDS
+
+Once the Datadog Python Library is installed, instantiate the StatsD client using UDS in your code:
+```python
+
+from datadog import initialize, statsd
+
+options = {
+ "statsd_socket_path": PATH_TO_SOCKET,
+}
+
+initialize(**options)
+```
+
+#### Origin detection over UDP and UDS
+
+Origin detection is a method to detect which pod `DogStatsD` packets are coming from in order to add the pod's tags to the tag list.
+The `DogStatsD` client attaches an internal tag, `entity_id`. The value of this tag is the content of the `DD_ENTITY_ID` environment variable if found, which is the pod's UID. The Datadog Agent uses this tag to add container tags to the metrics. To avoid overwriting this global tag, make sure to only `append` to the `constant_tags` list.
+
+To enable origin detection over UDP, add the following lines to your application manifest:
+```yaml
+env:
+ - name: DD_ENTITY_ID
+ valueFrom:
+ fieldRef:
+ fieldPath: metadata.uid
+```
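+
+A minimal sketch of the `constant_tags` guidance above (assuming the client exposes
+its `constant_tags` list, as current datadogpy versions do; the tag value is
+illustrative):
+
+```python
+from datadog import initialize, statsd
+
+initialize(statsd_host="127.0.0.1", statsd_port=8125)
+
+# Append to the existing list rather than reassigning it, so the internal
+# entity_id tag derived from DD_ENTITY_ID is preserved.
+statsd.constant_tags.append("app:web")
+```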
+
+### Usage
+#### Metrics
+
+After the client is created, you can start sending custom metrics to Datadog. See the dedicated [Metric Submission: DogStatsD documentation](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python) to see how to submit all supported metric types to Datadog with working code examples:
+
+* [Submit a COUNT metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#count).
+* [Submit a GAUGE metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#gauge).
+* [Submit a SET metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#set)
+* [Submit a HISTOGRAM metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#histogram)
+* [Submit a TIMER metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#timer)
+* [Submit a DISTRIBUTION metric](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#distribution)
+
+Some options are supported when submitting metrics, like [applying a Sample Rate to your metrics](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#metric-submission-options) or [tagging your metrics with your custom tags](https://docs.datadoghq.com/metrics/dogstatsd_metrics_submission/?code-lang=python#metric-tagging).
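+
+For example, a minimal sketch (the metric names and tags are illustrative):
+
+```python
+from datadog import initialize, statsd
+
+initialize(statsd_host="127.0.0.1", statsd_port=8125)
+
+# A COUNT with a sample rate, and a GAUGE with custom tags.
+statsd.increment("example_app.page.views", sample_rate=0.5, tags=["env:dev"])
+statsd.gauge("example_app.temperature", 21.0, tags=["env:dev"])
+```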
+
+#### Events
+
+After the client is created, you can start sending events to your Datadog Event Stream. See the dedicated [Event Submission: DogStatsD documentation](https://docs.datadoghq.com/events/guides/dogstatsd/?code-lang=python) to see how to submit an event to your Datadog Event Stream.
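+
+For example, a minimal sketch (the event fields are illustrative):
+
+```python
+from datadog import initialize, statsd
+
+initialize(statsd_host="127.0.0.1", statsd_port=8125)
+
+statsd.event("Deploy finished", "Version 1.2.3 is live.", alert_type="info")
+```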
+
+#### Service Checks
+
+After the client is created, you can start sending Service Checks to Datadog. See the dedicated [Service Check Submission: DogStatsD documentation](https://docs.datadoghq.com/developers/service_checks/dogstatsd_service_checks_submission/?code-lang=python) to see how to submit a Service Check to Datadog.
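+
+For example, a minimal sketch (the check name is illustrative; the integer status
+codes are 0 = OK, 1 = WARNING, 2 = CRITICAL, 3 = UNKNOWN):
+
+```python
+from datadog import initialize, statsd
+
+initialize(statsd_host="127.0.0.1", statsd_port=8125)
+
+statsd.service_check("example_app.can_connect", 0, tags=["env:dev"])
+```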
+
+### Monitoring this client
+
+This client automatically injects telemetry about itself into the DogStatsD stream.
+These metrics are not counted as custom metrics and are not billed. This feature can be disabled with the `statsd.disable_telemetry()` method.
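+
+For example, to turn client-side telemetry off:
+
+```python
+from datadog import statsd
+
+statsd.disable_telemetry()  # stop emitting client-side telemetry metrics
+```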
+
+See [Telemetry documentation](https://docs.datadoghq.com/developers/dogstatsd/high_throughput/?code-lang=python#client-side-telemetry) to learn more about it.
+
+### Benchmarks
+
+_Note: You need to install the `psutil` package before running the benchmarks._
+
+If you would like an approximate idea of the throughput your DogStatsD library
+can handle on your system, you can run the included local benchmark code:
+
+```sh-session
+$ # Python 2 Example
+$ python2 -m unittest -vvv tests.performance.test_statsd_throughput
+
+$ # Python 3 Example
+$ python3 -m unittest -vvv tests.performance.test_statsd_throughput
+```
+
+You can also set `BENCHMARK_*` environment variables to customize the runs:
+```sh-session
+$ # Example #1
+$ BENCHMARK_NUM_RUNS=10 BENCHMARK_NUM_THREADS=1 BENCHMARK_NUM_DATAPOINTS=5000 BENCHMARK_TRANSPORT="UDP" python2 -m unittest -vvv tests.performance.test_statsd_throughput
+
+$ # Example #2
+$ BENCHMARK_NUM_THREADS=10 BENCHMARK_TRANSPORT="UDS" python3 -m unittest -vvv tests.performance.test_statsd_throughput
+```
+
+## Maximum packet size in high-throughput scenarios
+
+To make the most efficient use of this library in high-throughput scenarios,
+default values for the maximum packet size have already been set for both UDS (8192 bytes)
+and UDP (1432 bytes) to make the best use of the underlying network.
+However, if you know your network well and a different maximum packet
+size should be used, you can set it with the `max_buffer_len` parameter. Example:
+
+```python
+from datadog import initialize
+
+options = {
+ "api_key": "",
+ "app_key": "",
+ "max_buffer_len": 4096,
+}
+
+initialize(**options)
+```
+
+## Thread Safety
+
+`DogStatsD` and `ThreadStats` are thread-safe.
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/RECORD
new file mode 100644
index 0000000..dd845b1
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/RECORD
@@ -0,0 +1,161 @@
+../../bin/dog,sha256=USXQo9Llj4ZKaLnsoeHfNyJv8BOTxhRN2K3ybH4P368,224
+../../bin/dogshell,sha256=USXQo9Llj4ZKaLnsoeHfNyJv8BOTxhRN2K3ybH4P368,224
+../../bin/dogshellwrap,sha256=F4Dt5QTVYjdzj46YMwEj3E_oJ5ccVicfzgwWTFFWhd8,229
+../../bin/dogwrap,sha256=F4Dt5QTVYjdzj46YMwEj3E_oJ5ccVicfzgwWTFFWhd8,229
+datadog-0.48.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+datadog-0.48.0.dist-info/METADATA,sha256=xQ5n174J3GUL9awAKYujVRMfD2vnAz_D-cPSF2kXCf8,10144
+datadog-0.48.0.dist-info/RECORD,,
+datadog-0.48.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+datadog-0.48.0.dist-info/WHEEL,sha256=VYAwk8D_V6zmIA2XKK-k7Fem_KAtVk3hugaRru3yjGc,105
+datadog-0.48.0.dist-info/entry_points.txt,sha256=UD-9aGJqcY-y_BqSuZvhoZEmoZcHJaAtfwmPHO3kf3g,158
+datadog-0.48.0.dist-info/licenses/LICENSE,sha256=LUpuzY69IJ0kJ3YlYDI6c8_9l9eyeW5xHv8-gRhgozM,1503
+datadog-0.48.0.dist-info/licenses/LICENSE-3rdparty.csv,sha256=W2B3r48ALOEkEFjyoUMmuU1lm9soN-rG9QqtufdYjts,252
+datadog/__init__.py,sha256=64k7GZlnwUUNl8R7aQvDeswZYBpveECUD1_JIdC4C0s,5551
+datadog/__pycache__/__init__.cpython-311.pyc,,
+datadog/__pycache__/version.cpython-311.pyc,,
+datadog/api/__init__.py,sha256=s2vtkIF6TN26-AiD2XEYUr7I9upD8m8VdN5e-b2qCb4,1900
+datadog/api/__pycache__/__init__.cpython-311.pyc,,
+datadog/api/__pycache__/api_client.cpython-311.pyc,,
+datadog/api/__pycache__/aws_integration.cpython-311.pyc,,
+datadog/api/__pycache__/aws_log_integration.cpython-311.pyc,,
+datadog/api/__pycache__/azure_integration.cpython-311.pyc,,
+datadog/api/__pycache__/comments.cpython-311.pyc,,
+datadog/api/__pycache__/constants.cpython-311.pyc,,
+datadog/api/__pycache__/dashboard_list_v2.cpython-311.pyc,,
+datadog/api/__pycache__/dashboard_lists.cpython-311.pyc,,
+datadog/api/__pycache__/dashboards.cpython-311.pyc,,
+datadog/api/__pycache__/distributions.cpython-311.pyc,,
+datadog/api/__pycache__/downtimes.cpython-311.pyc,,
+datadog/api/__pycache__/events.cpython-311.pyc,,
+datadog/api/__pycache__/exceptions.cpython-311.pyc,,
+datadog/api/__pycache__/format.cpython-311.pyc,,
+datadog/api/__pycache__/gcp_integration.cpython-311.pyc,,
+datadog/api/__pycache__/graphs.cpython-311.pyc,,
+datadog/api/__pycache__/hosts.cpython-311.pyc,,
+datadog/api/__pycache__/http_client.cpython-311.pyc,,
+datadog/api/__pycache__/infrastructure.cpython-311.pyc,,
+datadog/api/__pycache__/logs.cpython-311.pyc,,
+datadog/api/__pycache__/metadata.cpython-311.pyc,,
+datadog/api/__pycache__/metrics.cpython-311.pyc,,
+datadog/api/__pycache__/monitors.cpython-311.pyc,,
+datadog/api/__pycache__/permissions.cpython-311.pyc,,
+datadog/api/__pycache__/resources.cpython-311.pyc,,
+datadog/api/__pycache__/roles.cpython-311.pyc,,
+datadog/api/__pycache__/screenboards.cpython-311.pyc,,
+datadog/api/__pycache__/service_checks.cpython-311.pyc,,
+datadog/api/__pycache__/service_level_objectives.cpython-311.pyc,,
+datadog/api/__pycache__/synthetics.cpython-311.pyc,,
+datadog/api/__pycache__/tags.cpython-311.pyc,,
+datadog/api/__pycache__/timeboards.cpython-311.pyc,,
+datadog/api/__pycache__/users.cpython-311.pyc,,
+datadog/api/api_client.py,sha256=56xtTsbuel6TJXHCKrONq_sfscUOCAX4W4sj-TZVwzQ,10340
+datadog/api/aws_integration.py,sha256=ED6sdQCK1C93Z7VV_mO6aLw492G-B-A9mY2UtCRNaaY,10926
+datadog/api/aws_log_integration.py,sha256=vIIOkwVHD1KFkkZibRbkppIg3IgA37j-fyny6_16--M,4434
+datadog/api/azure_integration.py,sha256=wEBgwbh3PGNnPejXbePNTVjuRGDvEJfUoVpoHHkfMQU,3204
+datadog/api/comments.py,sha256=-ujRDfrSlTYt_QAPRNrzDhIZ6ohxEOXGH9L5r3vjhBw,461
+datadog/api/constants.py,sha256=8XSSk69ZSdAzl0q8_sRSMydRRiVT8kkA0Fo3ABceDWA,806
+datadog/api/dashboard_list_v2.py,sha256=kNizRqfzAKc6yq1j_OBGbUN3TjojatxoxSHUlQnn154,677
+datadog/api/dashboard_lists.py,sha256=vKAdF3xhDPwkg615_yyCTnASRJSDLIKW2ZV0oRSZvdY,1166
+datadog/api/dashboards.py,sha256=UQPvh_TtWWQk4Fww9bOTYXz0P8yIp22s6rW9A4ZJGA8,623
+datadog/api/distributions.py,sha256=MEnpxEqw-ZNdXrazMq2vBIzCgkI5uA_kPo1ZaFXppZ8,1895
+datadog/api/downtimes.py,sha256=o7JswAJXaDnK-nqlFDFW3WKPjOKYcB5d86nBK3swgVU,1072
+datadog/api/events.py,sha256=_e3ThyCsWuguRypEgfJ-zC48kaks4E0cdHRtI3tKyYU,3376
+datadog/api/exceptions.py,sha256=eVe7czONQySBZS8Std67ABXep-msgc8UYWnPqBIadnM,3171
+datadog/api/format.py,sha256=6RahpimFhlYEqUVfH9RgB9C1D8mgWYMXxCm8788rS-Y,1209
+datadog/api/gcp_integration.py,sha256=jr9rl-S3V_dV8Qlk5kMCAKSQgjTRet7wWsyfmzPzwqc,3778
+datadog/api/graphs.py,sha256=6BuHX01mxyJ7x8NWI968kmkBLAGBaAKe0JU6LrkoVvM,2695
+datadog/api/hosts.py,sha256=Qr4C6mEexWiCdUp98oIUzelSW2xb-J_c65O_Fpf9jm8,2491
+datadog/api/http_client.py,sha256=9VirvttGWQiWFH-fUW5BmUcNcwJaH9LLTqhgqSd5gcE,6465
+datadog/api/infrastructure.py,sha256=XnY0UvbEv_TKfMMYHAu884F0Jz8RmcPar55qrIil9Zc,1014
+datadog/api/logs.py,sha256=OCI0PmiNON8kIDul5bh-rJPg4Sp8rupQl76fNyMz4sk,727
+datadog/api/metadata.py,sha256=TdrrmRulgc1arbFzCp7PGCTK1vBJx25NWSH9b0KHxZc,2290
+datadog/api/metrics.py,sha256=S2sBGWeFRzcSA8pqu_eFwfBDF55sIEQ80J-ixw2ROas,5217
+datadog/api/monitors.py,sha256=8qqDGUdr_x3mGWeT6fvgJwkf8ydHJUrYzjQXA8JRnS0,4859
+datadog/api/permissions.py,sha256=iZjyktAJrnADC1pstL59H4DZsM_oplIng2rci6lrDss,720
+datadog/api/resources.py,sha256=fY3c-G7LrysmZOJCqi4vYTkRLxFnrbFiLLR_g5dHhZ4,16128
+datadog/api/roles.py,sha256=R0D3WezC7kVFoYR_wfKdbkIRu4k7mgCNag9D_Md7uYk,2370
+datadog/api/screenboards.py,sha256=fNa6N9bSamxopesHeCyIXZo-a4_da8kz60nVkf_t8D4,1389
+datadog/api/service_checks.py,sha256=ejDB2HqzL2iDg3-MUuvs8fwYv89LAh81G71IhSpf6dg,1615
+datadog/api/service_level_objectives.py,sha256=gBs30qG3PjRU78bUkVcxzlxqGsczVGuto8IfvuYk7oE,6547
+datadog/api/synthetics.py,sha256=YM1jyOG4HOxg-xlbI7YTVJ5e-uVIcrLr_7eWJae2e_Q,6135
+datadog/api/tags.py,sha256=u7lrXrKuyn_n-xoCtsJ2N9simr79KvmygiQDnI3XLwk,1612
+datadog/api/timeboards.py,sha256=4vTKGYblmfHtT8NVQ8P_tb-0EEm2X09W_HuR_2Vvnhw,618
+datadog/api/users.py,sha256=wvcRRgeZLYESScFTw3Dont8dwOr0d-ltBZctqnnYNSM,1440
+datadog/dogshell/__init__.py,sha256=5frjaiaWH7bwTuho4RAxOQwLGV6PlWuPONtqNoxCwQ4,4306
+datadog/dogshell/__pycache__/__init__.cpython-311.pyc,,
+datadog/dogshell/__pycache__/comment.cpython-311.pyc,,
+datadog/dogshell/__pycache__/common.cpython-311.pyc,,
+datadog/dogshell/__pycache__/dashboard.cpython-311.pyc,,
+datadog/dogshell/__pycache__/dashboard_list.cpython-311.pyc,,
+datadog/dogshell/__pycache__/downtime.cpython-311.pyc,,
+datadog/dogshell/__pycache__/event.cpython-311.pyc,,
+datadog/dogshell/__pycache__/host.cpython-311.pyc,,
+datadog/dogshell/__pycache__/metric.cpython-311.pyc,,
+datadog/dogshell/__pycache__/monitor.cpython-311.pyc,,
+datadog/dogshell/__pycache__/screenboard.cpython-311.pyc,,
+datadog/dogshell/__pycache__/search.cpython-311.pyc,,
+datadog/dogshell/__pycache__/service_check.cpython-311.pyc,,
+datadog/dogshell/__pycache__/service_level_objective.cpython-311.pyc,,
+datadog/dogshell/__pycache__/tag.cpython-311.pyc,,
+datadog/dogshell/__pycache__/timeboard.cpython-311.pyc,,
+datadog/dogshell/__pycache__/wrap.cpython-311.pyc,,
+datadog/dogshell/comment.py,sha256=-Sai_Bt0Lc1i1izMw-Y9swu1qTMqJersh_m734XphlM,6571
+datadog/dogshell/common.py,sha256=K9rTVyLOhhzmF_b7SloOC7YkrizT66c5_40zDe6thQs,5307
+datadog/dogshell/dashboard.py,sha256=FfEl8NCXg6Uw2af0itRVGZfCpiSPSqXhlpm2DlHDNso,6628
+datadog/dogshell/dashboard_list.py,sha256=Zk1EWwb6uVDfc_s17lW4LDbQe0ZdOhw76_4OyXuYXnA,12591
+datadog/dogshell/downtime.py,sha256=9DtKolbWCHiuhCSJWxgWUDh6TZrm431-Cf1IM-CBzDA,5324
+datadog/dogshell/event.py,sha256=7vIYeXR5hnoMH8QWL_gN5cahvERBZXBfOSYpPautvYw,7398
+datadog/dogshell/host.py,sha256=4hd3kpeyTw6HS_cOLF_zNm2rlXWXvsKakaLUZVkyzXk,2281
+datadog/dogshell/metric.py,sha256=6-D1QR_jhNiP70ltlGB-lYDTW1bDR3H0JAqVb01T1-k,2834
+datadog/dogshell/monitor.py,sha256=Bqb0U_JSGHE0aj7v1uUGHnR7YcEr5N6lz_cW-ivYY7g,16294
+datadog/dogshell/screenboard.py,sha256=q8Jq8chvurkOnUYJQgImMnD3Hl32btvd8_5OuszNLJk,11606
+datadog/dogshell/search.py,sha256=pZ9qoIpgQWh-AotSsgv1Y4GmDGvApHi4_xByk1ii0f0,1663
+datadog/dogshell/service_check.py,sha256=XgTeVqECRmNJU_ie5o14pm9BqAgt2Ka4RLdrNCJFZDY,2161
+datadog/dogshell/service_level_objective.py,sha256=LXCT2Ws4EKquYJhIZ-doY8dhRaGjnDgXdsewlBj5a7w,15044
+datadog/dogshell/tag.py,sha256=u3Dmfwx_HxBlA-5rZZQJMFX24XZ1nRgmJSzsyzEiZrM,4328
+datadog/dogshell/timeboard.py,sha256=9kwRqMtH2PTNKuDpyPn1-jdYhP2AbSgg9HF62HJvJDQ,13298
+datadog/dogshell/wrap.py,sha256=hxjQ2_USc42lsMflQ-GWTKUNi4yUQacyMsBZBd9E6MM,16934
+datadog/dogstatsd/__init__.py,sha256=4IoRSiHQXgIwlpDoC0BegFNS_G9Nq0tZOhTUKyzqCm0,294
+datadog/dogstatsd/__pycache__/__init__.cpython-311.pyc,,
+datadog/dogstatsd/__pycache__/base.cpython-311.pyc,,
+datadog/dogstatsd/__pycache__/container.cpython-311.pyc,,
+datadog/dogstatsd/__pycache__/context.cpython-311.pyc,,
+datadog/dogstatsd/__pycache__/context_async.cpython-311.pyc,,
+datadog/dogstatsd/__pycache__/route.cpython-311.pyc,,
+datadog/dogstatsd/base.py,sha256=tLuou8XH1Q9--1oS0yuFSO0B-UIE5YEolH0paqo81uI,52299
+datadog/dogstatsd/container.py,sha256=0doQtobT4ID8GKDwa-jUjUFr_NTsf0jgc2joaUT0y7o,2052
+datadog/dogstatsd/context.py,sha256=yZgl5pCTHf0GrGkiruAy0H9dVHWZDlvVxjkn6e_elcQ,2873
+datadog/dogstatsd/context_async.py,sha256=wJgbf9n_pHaN95I0I1RoxycjoK18L0ZBGUVrzcVsW4M,1543
+datadog/dogstatsd/route.py,sha256=VOoCuD5XD9PPtEydVjpbz_FldgGEOd8Yazpt2YoVD-U,1253
+datadog/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+datadog/threadstats/__init__.py,sha256=a8JfLr2QiPHpxhEC-U5gmRuh9UI5kaLjvv785o_qEsY,379
+datadog/threadstats/__pycache__/__init__.cpython-311.pyc,,
+datadog/threadstats/__pycache__/aws_lambda.cpython-311.pyc,,
+datadog/threadstats/__pycache__/base.cpython-311.pyc,,
+datadog/threadstats/__pycache__/constants.cpython-311.pyc,,
+datadog/threadstats/__pycache__/events.cpython-311.pyc,,
+datadog/threadstats/__pycache__/metrics.cpython-311.pyc,,
+datadog/threadstats/__pycache__/periodic_timer.cpython-311.pyc,,
+datadog/threadstats/__pycache__/reporters.cpython-311.pyc,,
+datadog/threadstats/aws_lambda.py,sha256=E71iKuW9p4tLW3HZN6QzCPFLmP4ICsSSHogXXefjCHs,3701
+datadog/threadstats/base.py,sha256=YfUWWYL0DptCSCAMUe0qxc35wciCLcqNHEtX8PRFZw8,19162
+datadog/threadstats/constants.py,sha256=3BDnCBKzznBZLsY2oKs8EQBT4vJnIStRcl19FlfxMtw,569
+datadog/threadstats/events.py,sha256=Sa69_TyFoe333mPhcG2vtkYPkeqm-JJTNZDDZWhP1kU,713
+datadog/threadstats/metrics.py,sha256=CAUUzmx6GL78MWLpGWBsm1eZ9RR9Jgs2yCGY24yIp80,6242
+datadog/threadstats/periodic_timer.py,sha256=8DlyzDLcfsVhpoG8sc_MpaJvm-YDx4A5JGkt9vLXVP4,1137
+datadog/threadstats/reporters.py,sha256=SJ45WtEYLModVIq8e6XdGgGAVxPFwW-Cri8d0-s_e1I,937
+datadog/util/__init__.py,sha256=nHOZxl1VhFT33JpvolN8S3QWGNPE-BptvlumBl8pCEo,233
+datadog/util/__pycache__/__init__.cpython-311.pyc,,
+datadog/util/__pycache__/cli.cpython-311.pyc,,
+datadog/util/__pycache__/compat.cpython-311.pyc,,
+datadog/util/__pycache__/config.cpython-311.pyc,,
+datadog/util/__pycache__/deprecation.cpython-311.pyc,,
+datadog/util/__pycache__/format.cpython-311.pyc,,
+datadog/util/__pycache__/hostname.cpython-311.pyc,,
+datadog/util/cli.py,sha256=OCGeY63V_iARHFod1sXbe8Fin7zIAZrA_1zJGqvURMY,5013
+datadog/util/compat.py,sha256=0UxFczhysUXWXsC1ZLo80Rte2qEo_VVt5RdV-x3JmF8,3284
+datadog/util/config.py,sha256=4hT22Kb1jPC7_9nPkaznuwRS3AqQ-X0wFbfrJK11x7I,3922
+datadog/util/deprecation.py,sha256=Aznjj1YLEB0WDt9YO84BVSNFnnolEBdXH9Vwrq1Npx4,782
+datadog/util/format.py,sha256=9jXeqsvnHr44X6B008k25qcwPES6OqB05-s8wee9_0c,1339
+datadog/util/hostname.py,sha256=5yedKu2G59Iv7m3WmdUmhmxb3KgC4VQrHueL4Z1wyJg,10296
+datadog/version.py,sha256=bkYe4lEQZCEmFm0XRZaZkxTV1niMqR_lbp-tzKL6s6c,23
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/WHEEL
new file mode 100644
index 0000000..a5543ba
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: hatchling 1.21.0
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/entry_points.txt b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/entry_points.txt
new file mode 100644
index 0000000..4b946cb
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/entry_points.txt
@@ -0,0 +1,5 @@
+[console_scripts]
+dog = datadog.dogshell:main
+dogshell = datadog.dogshell:main
+dogshellwrap = datadog.dogshell.wrap:main
+dogwrap = datadog.dogshell.wrap:main
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/licenses/LICENSE b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/licenses/LICENSE
new file mode 100644
index 0000000..984d5d2
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/licenses/LICENSE
@@ -0,0 +1,26 @@
+Copyright (c) 2015-Present Datadog, Inc
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+* Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/licenses/LICENSE-3rdparty.csv b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/licenses/LICENSE-3rdparty.csv
new file mode 100644
index 0000000..3afd934
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog-0.48.0.dist-info/licenses/LICENSE-3rdparty.csv
@@ -0,0 +1,4 @@
+Component,Origin,License,Copyright
+setup.py,decorator,BSD-2-Clause,Copyright (c) 2005-2018, Michele Simionato
+setup.py,requests,Apache-2.0,Copyright 2019 Kenneth Reitz
+setup.py,argparse,Python-2.0,2006-2009 Steven J. Bethard
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/.DS_Store b/lambdas/aws-dd-forwarder-3.127.0/datadog/.DS_Store
new file mode 100644
index 0000000..5900677
Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/datadog/.DS_Store differ
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/__init__.py
new file mode 100644
index 0000000..ffe4b64
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/__init__.py
@@ -0,0 +1,138 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+"""
+Datadogpy is a collection of Datadog Python tools.
+It contains:
+* datadog.api: a Python client for Datadog REST API.
+* datadog.dogstatsd: a DogStatsd Python client.
+* datadog.threadstats: an alternative tool to DogStatsd client for collecting application metrics
+without hindering performance.
+* datadog.dogshell: a command-line tool, wrapping datadog.api, to interact with Datadog REST API.
+"""
+# stdlib
+import logging
+import os
+import os.path
+from typing import Any, List, Optional
+
+# datadog
+from datadog import api
+from datadog.dogstatsd import DogStatsd, statsd # noqa
+from datadog.threadstats import ThreadStats, datadog_lambda_wrapper, lambda_metric # noqa
+from datadog.util.compat import iteritems, NullHandler, text
+from datadog.util.hostname import get_hostname
+from datadog.version import __version__ # noqa
+
+# Loggers
+logging.getLogger("datadog.api").addHandler(NullHandler())
+logging.getLogger("datadog.dogstatsd").addHandler(NullHandler())
+logging.getLogger("datadog.threadstats").addHandler(NullHandler())
+
+
+def initialize(
+ api_key=None, # type: Optional[str]
+ app_key=None, # type: Optional[str]
+ host_name=None, # type: Optional[str]
+ api_host=None, # type: Optional[str]
+ statsd_host=None, # type: Optional[str]
+ statsd_port=None, # type: Optional[int]
+ statsd_disable_buffering=True, # type: bool
+ statsd_use_default_route=False, # type: bool
+ statsd_socket_path=None, # type: Optional[str]
+ statsd_namespace=None, # type: Optional[str]
+ statsd_constant_tags=None, # type: Optional[List[str]]
+ return_raw_response=False, # type: bool
+ hostname_from_config=True, # type: bool
+ **kwargs # type: Any
+):
+ # type: (...) -> None
+ """
+ Initialize and configure Datadog.api and Datadog.statsd modules
+
+ :param api_key: Datadog API key
+ :type api_key: string
+
+ :param app_key: Datadog application key
+ :type app_key: string
+
+ :param host_name: Set a specific hostname
+ :type host_name: string
+
+ :param proxies: Proxy to use to connect to Datadog API;
+        for example, 'proxies': {'http': "http://<user>:<pass>@<host>:<port>/"}
+ :type proxies: dictionary mapping protocol to the URL of the proxy.
+
+ :param api_host: Datadog API endpoint
+ :type api_host: url
+
+ :param statsd_host: Host of DogStatsd server or statsd daemon
+ :type statsd_host: address
+
+ :param statsd_port: Port of DogStatsd server or statsd daemon
+ :type statsd_port: port
+
+ :param statsd_disable_buffering: Enable/disable statsd client buffering support
+ (default: True).
+ :type statsd_disable_buffering: boolean
+
+ :param statsd_use_default_route: Dynamically set the statsd host to the default route
+ (Useful when running the client in a container)
+ :type statsd_use_default_route: boolean
+
+ :param statsd_socket_path: path to the DogStatsd UNIX socket. Supersedes statsd_host
+ and stats_port if provided.
+
+ :param statsd_constant_tags: A list of tags to be applied to all metrics ("tag", "tag:value")
+ :type statsd_constant_tags: list of string
+
+ :param cacert: Path to local certificate file used to verify SSL \
+ certificates. Can also be set to True (default) to use the systems \
+ certificate store, or False to skip SSL verification
+ :type cacert: path or boolean
+
+ :param mute: Mute any ApiError or ClientError before they escape \
+ from datadog.api.HTTPClient (default: True).
+ :type mute: boolean
+
+ :param return_raw_response: Whether or not to return the raw response object in addition \
+ to the decoded response content (default: False)
+ :type return_raw_response: boolean
+
+ :param hostname_from_config: Set the hostname from the Datadog agent config (agent 5). Will be deprecated
+ :type hostname_from_config: boolean
+ """
+ # API configuration
+ api._api_key = api_key or api._api_key or os.environ.get("DATADOG_API_KEY", os.environ.get("DD_API_KEY"))
+ api._application_key = (
+ app_key or api._application_key or os.environ.get("DATADOG_APP_KEY", os.environ.get("DD_APP_KEY"))
+ )
+ api._hostname_from_config = hostname_from_config
+ api._host_name = host_name or api._host_name or get_hostname(hostname_from_config)
+ api._api_host = api_host or api._api_host or os.environ.get("DATADOG_HOST", "https://api.datadoghq.com")
+
+ # Statsd configuration
+ # ...overrides the default `statsd` instance attributes
+ if statsd_socket_path:
+ statsd.socket_path = statsd_socket_path
+ statsd.host = None
+ statsd.port = None
+ else:
+ if statsd_host or statsd_use_default_route:
+ statsd.host = statsd.resolve_host(statsd_host, statsd_use_default_route)
+ if statsd_port:
+ statsd.port = int(statsd_port)
+ statsd.close_socket()
+ if statsd_namespace:
+ statsd.namespace = text(statsd_namespace)
+ if statsd_constant_tags:
+ statsd.constant_tags += statsd_constant_tags
+
+ statsd.disable_buffering = statsd_disable_buffering
+
+ api._return_raw_response = return_raw_response
+
+ # HTTP client and API options
+ for key, value in iteritems(kwargs):
+ attribute = "_{}".format(key)
+ setattr(api, attribute, value)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/__init__.py
new file mode 100644
index 0000000..eb477c9
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/__init__.py
@@ -0,0 +1,52 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# flake8: noqa
+
+from typing import Optional
+
+# API settings
+_api_key = None # type: Optional[str]
+_application_key = None # type: Optional[str]
+_api_version = "v1"
+_api_host = None # type: Optional[str]
+_host_name = None # type: Optional[str]
+_hostname_from_config = True
+_cacert = True
+
+# HTTP(S) settings
+_proxies = None
+_timeout = 60
+_max_timeouts = 3
+_max_retries = 3
+_backoff_period = 300
+_mute = True
+_return_raw_response = False
+
+# Resources
+from datadog.api.comments import Comment
+from datadog.api.dashboard_lists import DashboardList
+from datadog.api.distributions import Distribution
+from datadog.api.downtimes import Downtime
+from datadog.api.timeboards import Timeboard
+from datadog.api.dashboards import Dashboard
+from datadog.api.events import Event
+from datadog.api.infrastructure import Infrastructure
+from datadog.api.metadata import Metadata
+from datadog.api.metrics import Metric
+from datadog.api.monitors import Monitor
+from datadog.api.screenboards import Screenboard
+from datadog.api.graphs import Graph, Embed
+from datadog.api.hosts import Host, Hosts
+from datadog.api.service_checks import ServiceCheck
+from datadog.api.tags import Tag
+from datadog.api.users import User
+from datadog.api.aws_integration import AwsIntegration
+from datadog.api.aws_log_integration import AwsLogsIntegration
+from datadog.api.azure_integration import AzureIntegration
+from datadog.api.gcp_integration import GcpIntegration
+from datadog.api.roles import Roles
+from datadog.api.permissions import Permissions
+from datadog.api.service_level_objectives import ServiceLevelObjective
+from datadog.api.synthetics import Synthetics
+from datadog.api.logs import Logs
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/api_client.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/api_client.py
new file mode 100644
index 0000000..db34873
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/api_client.py
@@ -0,0 +1,290 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import json
+import logging
+import time
+import zlib
+
+# datadog
+from datadog.api import _api_version, _max_timeouts, _backoff_period
+from datadog.api.exceptions import ClientError, ApiError, HttpBackoff, HttpTimeout, ApiNotInitialized
+from datadog.api.http_client import resolve_http_client
+from datadog.util.compat import is_p3k
+from datadog.util.format import construct_url, normalize_tags
+
+
+log = logging.getLogger("datadog.api")
+
+
+class APIClient(object):
+ """
+ Datadog API client: format and submit API calls to Datadog.
+    Embeds an HTTP client.
+ """
+
+ # HTTP transport parameters
+ _backoff_period = _backoff_period
+ _max_timeouts = _max_timeouts
+ _backoff_timestamp = None
+ _timeout_counter = 0
+ _sort_keys = False
+
+ # Plugged HTTP client
+ _http_client = None
+
+ @classmethod
+ def _get_http_client(cls):
+ """
+ Getter for the embedded HTTP client.
+ """
+ if not cls._http_client:
+ cls._http_client = resolve_http_client()
+
+ return cls._http_client
+
+ @classmethod
+ def submit(
+ cls,
+ method,
+ path,
+ api_version=None,
+ body=None,
+ attach_host_name=False,
+ response_formatter=None,
+ error_formatter=None,
+ suppress_response_errors_on_codes=None,
+ compress_payload=False,
+ **params
+ ):
+ """
+ Make an HTTP API request
+
+ :param method: HTTP method to use to contact API endpoint
+ :type method: HTTP method string
+
+ :param path: API endpoint url
+ :type path: url
+
+ :param api_version: The API version used
+
+ :param body: dictionary to be sent in the body of the request
+ :type body: dictionary
+
+ :param response_formatter: function to format JSON response from HTTP API request
+ :type response_formatter: JSON input function
+
+ :param error_formatter: function to format JSON error response from HTTP API request
+ :type error_formatter: JSON input function
+
+ :param attach_host_name: link the new resource object to the host name
+ :type attach_host_name: bool
+
+ :param suppress_response_errors_on_codes: suppress ApiError on `errors` key in the response for the given HTTP
+ status codes
+ :type suppress_response_errors_on_codes: None|list(int)
+
+ :param compress_payload: compress the payload using zlib
+ :type compress_payload: bool
+
+ :param params: dictionary to be sent in the query string of the request
+ :type params: dictionary
+
+ :returns: JSON or formatted response from HTTP API request
+ """
+ try:
+ # Check if it's ok to submit
+ if not cls._should_submit():
+ _, backoff_time_left = cls._backoff_status()
+ raise HttpBackoff(backoff_time_left)
+
+ # Import API, User and HTTP settings
+ from datadog.api import (
+ _api_key,
+ _application_key,
+ _api_host,
+ _mute,
+ _host_name,
+ _proxies,
+ _max_retries,
+ _timeout,
+ _cacert,
+ _return_raw_response,
+ )
+
+            # Check keys and add them to the request headers
+ if _api_key is None:
+                raise ApiNotInitialized("API key is not set. Please run the 'initialize' method first.")
+
+ # Set api and app keys in headers
+ headers = {}
+ headers["DD-API-KEY"] = _api_key
+ if _application_key:
+ headers["DD-APPLICATION-KEY"] = _application_key
+
+ # Check if the api_version is provided
+ if not api_version:
+ api_version = _api_version
+
+ # Attach host name to body
+ if attach_host_name and body:
+                # Is it a 'series' list of objects?
+ if "series" in body:
+ # Adding the host name to all objects
+ for obj_params in body["series"]:
+ if obj_params.get("host", "") == "":
+ obj_params["host"] = _host_name
+ else:
+ if body.get("host", "") == "":
+ body["host"] = _host_name
+
+ # If defined, make sure tags are defined as a comma-separated string
+ if "tags" in params and isinstance(params["tags"], list):
+ tag_list = normalize_tags(params["tags"])
+ params["tags"] = ",".join(tag_list)
+
+ # If defined, make sure monitor_ids are defined as a comma-separated string
+ if "monitor_ids" in params and isinstance(params["monitor_ids"], list):
+ params["monitor_ids"] = ",".join(str(i) for i in params["monitor_ids"])
+
+ # Process the body, if necessary
+ if isinstance(body, dict):
+ body = json.dumps(body, sort_keys=cls._sort_keys)
+ headers["Content-Type"] = "application/json"
+
+ if compress_payload:
+ body = zlib.compress(body.encode("utf-8"))
+ headers["Content-Encoding"] = "deflate"
+
+ # Construct the URL
+ url = construct_url(_api_host, api_version, path)
+
+ # Process requesting
+ start_time = time.time()
+
+ result = cls._get_http_client().request(
+ method=method,
+ url=url,
+ headers=headers,
+ params=params,
+ data=body,
+ timeout=_timeout,
+ max_retries=_max_retries,
+ proxies=_proxies,
+ verify=_cacert,
+ )
+
+ # Request succeeded: log it and reset the timeout counter
+ duration = round((time.time() - start_time) * 1000.0, 4)
+ log.info("%s %s %s (%sms)" % (result.status_code, method, url, duration))
+ cls._timeout_counter = 0
+
+ # Format response content
+ content = result.content
+
+ if content:
+ try:
+ if is_p3k():
+ response_obj = json.loads(content.decode("utf-8"))
+ else:
+ response_obj = json.loads(content)
+ except ValueError:
+ raise ValueError("Invalid JSON response: {0}".format(content))
+
+ # response_obj can be a bool and not a dict
+ if isinstance(response_obj, dict):
+ if response_obj and "errors" in response_obj:
+ # suppress ApiError when specified and just return the response
+ if not (
+ suppress_response_errors_on_codes
+ and result.status_code in suppress_response_errors_on_codes
+ ):
+ raise ApiError(response_obj)
+ else:
+ response_obj = None
+
+ if response_formatter is not None:
+ response_obj = response_formatter(response_obj)
+
+ if _return_raw_response:
+ return response_obj, result
+ else:
+ return response_obj
+
+ except HttpTimeout:
+ cls._timeout_counter += 1
+ raise
+ except ClientError as e:
+ if _mute:
+ log.error(str(e))
+ if error_formatter is None:
+ return {"errors": e.args[0]}
+ else:
+ return error_formatter({"errors": e.args[0]})
+ else:
+ raise
+ except ApiError as e:
+ if _mute:
+ for error in e.args[0].get("errors") or []:
+ log.error(error)
+ if error_formatter is None:
+ return e.args[0]
+ else:
+ return error_formatter(e.args[0])
+ else:
+ raise
+
+ @classmethod
+ def _should_submit(cls):
+ """
+ Returns True if we're in a state where we should make a request
+ (backoff expired, no backoff in effect), false otherwise.
+ """
+ now = time.time()
+ should_submit = False
+
+ # If we're not backing off, but the timeout counter exceeds the max
+ # number of timeouts, then enter the backoff state, recording the time
+ # we started backing off
+ if not cls._backoff_timestamp and cls._timeout_counter >= cls._max_timeouts:
+ log.info(
+ "Max number of datadog timeouts exceeded, backing off for %s seconds",
+ cls._backoff_period,
+ )
+ cls._backoff_timestamp = now
+ should_submit = False
+
+        # If we are backing off but we've waited sufficiently long
+        # (backoff_retry_age), exit the backoff state and reset the timeout
+        # counter so that we try submitting metrics again
+ elif cls._backoff_timestamp:
+ backed_off_time, backoff_time_left = cls._backoff_status()
+ if backoff_time_left < 0:
+ log.info(
+ "Exiting backoff state after %s seconds, will try to submit metrics again",
+ backed_off_time,
+ )
+ cls._backoff_timestamp = None
+ cls._timeout_counter = 0
+ should_submit = True
+ else:
+ log.info(
+ "In backoff state, won't submit metrics for another %s seconds",
+ backoff_time_left,
+ )
+ should_submit = False
+ else:
+ should_submit = True
+
+ return should_submit
+
+ @classmethod
+ def _backoff_status(cls):
+ """
+ Get a backoff report, i.e. backoff total and remaining time.
+ """
+ now = time.time()
+ backed_off_time = now - cls._backoff_timestamp
+ backoff_time_left = cls._backoff_period - backed_off_time
+ return round(backed_off_time, 2), round(backoff_time_left, 2)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/aws_integration.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/aws_integration.py
new file mode 100644
index 0000000..eb4358b
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/aws_integration.py
@@ -0,0 +1,248 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import (
+ GetableAPIResource,
+ CreateableAPIResource,
+ DeletableAPIResource,
+ UpdatableAPIResource,
+ UpdatableAPISubResource,
+ ListableAPISubResource,
+)
+
+
+class AwsIntegration(
+ GetableAPIResource,
+ CreateableAPIResource,
+ DeletableAPIResource,
+ ListableAPISubResource,
+ UpdatableAPIResource,
+ UpdatableAPISubResource,
+):
+ """
+ A wrapper around AWS Integration API.
+ """
+
+ _resource_name = "integration"
+ _resource_id = "aws"
+
+ @classmethod
+ def list(cls, **params):
+ """
+ List all Datadog-AWS integrations available in your Datadog organization.
+
+ >>> api.AwsIntegration.list()
+ """
+ return super(AwsIntegration, cls).get(id=cls._resource_id, **params)
+
+ @classmethod
+ def create(cls, **params):
+ """
+ Add a new AWS integration config.
+
+ :param account_id: Your AWS Account ID without dashes. \
+ Consult the Datadog AWS integration to learn more about \
+ your AWS account ID.
+ :type account_id: string
+
+ :param access_key_id: If your AWS account is a GovCloud \
+ or China account, enter the corresponding Access Key ID.
+ :type access_key_id: string
+
+ :param role_name: Your Datadog role delegation name. \
+            For more information about your AWS account role name, \
+ see the Datadog AWS integration configuration info.
+ :type role_name: string
+
+ :param filter_tags: The array of EC2 tags (in the form key:value) \
+ defines a filter that Datadog uses when collecting metrics from EC2. \
+ Wildcards, such as ? (for single characters) and * (for multiple characters) \
+ can also be used. Only hosts that match one of the defined tags will be imported \
+            into Datadog. The rest will be ignored. Hosts matching a given tag can also be \
+            excluded by adding ! before the tag, e.g. \
+            env:production,instance-type:c1.*,!region:us-east-1. For more information \
+ on EC2 tagging, see the AWS tagging documentation.
+ :type filter_tags: list of strings
+
+ :param host_tags: Array of tags (in the form key:value) to add to all hosts and \
+ metrics reporting through this integration.
+ :type host_tags: list of strings
+
+ :param account_specific_namespace_rules: An object (in the form \
+ {"namespace1":true/false, "namespace2":true/false}) that enables \
+ or disables metric collection for specific AWS namespaces for this \
+ AWS account only. A list of namespaces can be found at the \
+ /v1/integration/aws/available_namespace_rules endpoint.
+ :type account_specific_namespace_rules: dictionary
+
+ :param excluded_regions: An array of AWS regions to exclude \
+ from metrics collection.
+ :type excluded_regions: list of strings
+
+ :returns: Dictionary representing the API's JSON response
+
+        >>> account_id = "<AWS_ACCOUNT_ID>"
+        >>> access_key_id = "<AWS_ACCESS_KEY_ID>"
+        >>> role_name = "DatadogAwsRole"
+        >>> filter_tags = ["<KEY>:<VALUE>"]
+        >>> host_tags = ["<KEY>:<VALUE>"]
+ >>> account_specific_namespace_rules = {"namespace1":true/false, "namespace2":true/false}
+ >>> excluded_regions = ["us-east-1", "us-west-1"]
+
+ >>> api.AwsIntegration.create(account_id=account_id, role_name=role_name, \
+ filter_tags=filter_tags,host_tags=host_tags,\
+ account_specific_namespace_rules=account_specific_namespace_rules \
+ excluded_regions=excluded_regions)
+ """
+ return super(AwsIntegration, cls).create(id=cls._resource_id, **params)
+
+ @classmethod
+ def update(cls, **body):
+ """
+ Update an AWS integration config.
+
+ :param account_id: Your existing AWS Account ID without dashes. \
+ Consult the Datadog AWS integration to learn more about \
+ your AWS account ID.
+ :type account_id: string
+
+ :param new_account_id: Your new AWS Account ID without dashes. \
+ Consult the Datadog AWS integration to learn more about \
+ your AWS account ID. This is the account to be updated.
+ :type new_account_id: string
+
+ :param role_name: Your existing Datadog role delegation name. \
+            For more information about your AWS account role name, \
+ see the Datadog AWS integration configuration info.
+ :type role_name: string
+
+ :param new_role_name: Your new Datadog role delegation name. \
+            For more information about your AWS account role name, \
+ see the Datadog AWS integration configuration info. \
+ This is the role_name to be updated.
+ :type new_role_name: string
+
+ :param access_key_id: If your AWS account is a GovCloud \
+ or China account, enter the existing Access Key ID.
+ :type access_key_id: string
+
+ :param new_access_key_id: If your AWS account is a GovCloud \
+ or China account, enter the new Access Key ID to be set.
+ :type new_access_key_id: string
+
+ :param secret_access_key: If your AWS account is a GovCloud \
+ or China account, enter the existing Secret Access Key.
+ :type secret_access_key: string
+
+ :param new_secret_access_key: If your AWS account is a GovCloud \
+ or China account, enter the new key to be set.
+ :type new_secret_access_key: string
+
+ :param filter_tags: The array of EC2 tags (in the form key:value) \
+ defines a filter that Datadog uses when collecting metrics from EC2. \
+ Wildcards, such as ? (for single characters) and * (for multiple characters) \
+ can also be used. Only hosts that match one of the defined tags will be imported \
+            into Datadog. The rest will be ignored. Hosts matching a given tag can also be \
+            excluded by adding ! before the tag, e.g. \
+            env:production,instance-type:c1.*,!region:us-east-1. For more information \
+ on EC2 tagging, see the AWS tagging documentation.
+ :type filter_tags: list of strings
+
+ :param host_tags: Array of tags (in the form key:value) to add to all hosts and \
+ metrics reporting through this integration.
+ :type host_tags: list of strings
+
+ :param account_specific_namespace_rules: An object (in the form \
+ {"namespace1":true/false, "namespace2":true/false}) that enables \
+ or disables metric collection for specific AWS namespaces for this \
+ AWS account only. A list of namespaces can be found at the \
+ /v1/integration/aws/available_namespace_rules endpoint.
+ :type account_specific_namespace_rules: dictionary
+
+ :param excluded_regions: An array of AWS regions to exclude \
+ from metrics collection.
+ :type excluded_regions: list of strings
+
+ :returns: Dictionary representing the API's JSON response
+
+ The following will depend on whether role delegation or access keys are being used.
+ If using role delegation, use the fields for role_name and account_id.
+ For access keys, use fields for access_key_id and secret_access_key.
+
+        Both the existing fields and the new fields are required in either case, i.e. if the config is \
+        account_id/role_name based, then `account_id`, `role_name`, `new_account_id`, and \
+        `new_role_name` are all required.
+
+ For access_key based accounts, `access_key_id`, `secret_access_key`, `new_access_key_id`, \
+ and `new_secret_access_key` are all required.
+
+        >>> account_id = "<EXISTING_AWS_ACCOUNT_ID>"
+        >>> role_name = "<EXISTING_DATADOG_ROLE_DELEGATION_NAME>"
+        >>> access_key_id = "<EXISTING_AWS_ACCESS_KEY_ID>"
+        >>> secret_access_key = "<EXISTING_AWS_SECRET_ACCESS_KEY>"
+        >>> new_account_id = "<NEW_AWS_ACCOUNT_ID>"
+        >>> new_role_name = "<NEW_DATADOG_ROLE_DELEGATION_NAME>"
+        >>> new_access_key_id = "<NEW_AWS_ACCESS_KEY_ID>"
+        >>> new_secret_access_key = "<NEW_AWS_SECRET_ACCESS_KEY>"
+        >>> filter_tags = ["<KEY>:<VALUE>"]
+        >>> host_tags = ["<KEY>:<VALUE>"]
+ >>> account_specific_namespace_rules = {"namespace1":true/false, "namespace2":true/false}
+ >>> excluded_regions = ["us-east-1", "us-west-1"]
+
+ >>> api.AwsIntegration.update(account_id=account_id, role_name=role_name, \
+ new_account_id=new_account_id, new_role_name=new_role_name, \
+ filter_tags=filter_tags,host_tags=host_tags,\
+ account_specific_namespace_rules=account_specific_namespace_rules, \
+ excluded_regions=excluded_regions)
+ """
+ params = {}
+ if body.get("account_id") and body.get("role_name"):
+ params["account_id"] = body.pop("account_id")
+ params["role_name"] = body.pop("role_name")
+ if body.get("new_account_id"):
+ body["account_id"] = body.pop("new_account_id")
+ if body.get("new_role_name"):
+ body["role_name"] = body.pop("new_role_name")
+ if body.get("access_key_id") and body.get("secret_access_key"):
+ params["access_key_id"] = body.pop("access_key_id")
+ params["secret_access_key"] = body.pop("secret_access_key")
+ if body.get("new_access_key_id"):
+ body["access_key_id"] = body.pop("new_access_key_id")
+ if body.get("new_secret_access_key"):
+ body["secret_access_key"] = body.pop("new_secret_access_key")
+ return super(AwsIntegration, cls).update(id=cls._resource_id, params=params, **body)
+
+ @classmethod
+ def delete(cls, **body):
+ """
+ Delete a given Datadog-AWS integration.
+
+        >>> account_id = "<AWS_ACCOUNT_ID>"
+        >>> role_name = "<DATADOG_ROLE_DELEGATION_NAME>"
+
+        >>> api.AwsIntegration.delete(account_id=account_id, role_name=role_name)
+ """
+ return super(AwsIntegration, cls).delete(id=cls._resource_id, body=body)
+
+ @classmethod
+ def list_namespace_rules(cls, **params):
+ """
+ List all namespace rules available as options.
+
+ >>> api.AwsIntegration.list_namespace_rules()
+ """
+ cls._sub_resource_name = "available_namespace_rules"
+ return super(AwsIntegration, cls).get_items(id=cls._resource_id, **params)
+
+ @classmethod
+ def generate_new_external_id(cls, **params):
+ """
+ Generate a new AWS external id for a given AWS account id and role name pair.
+
+        >>> account_id = "<AWS_ACCOUNT_ID>"
+        >>> role_name = "<DATADOG_ROLE_DELEGATION_NAME>"
+
+        >>> api.AwsIntegration.generate_new_external_id(account_id=account_id, role_name=role_name)
+ """
+ cls._sub_resource_name = "generate_new_external_id"
+ return super(AwsIntegration, cls).update_items(id=cls._resource_id, **params)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/aws_log_integration.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/aws_log_integration.py
new file mode 100644
index 0000000..3528435
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/aws_log_integration.py
@@ -0,0 +1,111 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import DeletableAPISubResource, ListableAPISubResource, AddableAPISubResource
+
+
+class AwsLogsIntegration(DeletableAPISubResource, ListableAPISubResource, AddableAPISubResource):
+ """
+ A wrapper around AWS Logs API.
+ """
+
+ _resource_name = "integration"
+ _resource_id = "aws"
+
+ @classmethod
+ def list_log_services(cls, **params):
+ """
+        List all AWS services for which Datadog offers automatic log collection.
+
+ >>> api.AwsLogsIntegration.list_log_services()
+ """
+ cls._sub_resource_name = "logs/services"
+ return super(AwsLogsIntegration, cls).get_items(id=cls._resource_id, **params)
+
+ @classmethod
+ def add_log_lambda_arn(cls, **params):
+ """
+ Attach the Lambda ARN of the Lambda created for the Datadog-AWS \
+ log collection to your AWS account ID to enable log collection.
+
+        >>> account_id = "<AWS_ACCOUNT_ID>"
+        >>> lambda_arn = "<LAMBDA_ARN>"
+
+ >>> api.AwsLogsIntegration.add_log_lambda_arn(account_id=account_id, lambda_arn=lambda_arn)
+ """
+ cls._sub_resource_name = "logs"
+ return super(AwsLogsIntegration, cls).add_items(id=cls._resource_id, **params)
+
+ @classmethod
+ def save_services(cls, **params):
+ """
+ Enable Automatic Log collection for your AWS services.
+
+        >>> account_id = "<AWS_ACCOUNT_ID>"
+        >>> services = ["s3", "elb", "elbv2", "cloudfront", "redshift", "lambda"]
+
+        >>> api.AwsLogsIntegration.save_services(account_id=account_id, services=services)
+ """
+ cls._sub_resource_name = "logs/services"
+ return super(AwsLogsIntegration, cls).add_items(id=cls._resource_id, **params)
+
+ @classmethod
+ def delete_config(cls, **params):
+ """
+ Delete a Datadog-AWS log collection configuration by removing the specific Lambda ARN \
+ associated with a given AWS account.
+
+        >>> account_id = "<AWS_ACCOUNT_ID>"
+        >>> lambda_arn = "<LAMBDA_ARN>"
+
+ >>> api.AwsLogsIntegration.delete_config(account_id=account_id, lambda_arn=lambda_arn)
+ """
+ cls._sub_resource_name = "logs"
+ return super(AwsLogsIntegration, cls).delete_items(id=cls._resource_id, **params)
+
+ @classmethod
+ def check_lambda(cls, **params):
+ """
+ Check function to see if a lambda_arn exists within an account. \
+ This sends a job on our side if it does not exist, then immediately returns \
+ the status of that job. Subsequent requests will always repeat the above, so this endpoint \
+ can be polled intermittently instead of blocking.
+
+ Returns a status of 'created' when it's checking if the Lambda exists in the account.
+ Returns a status of 'waiting' while checking.
+ Returns a status of 'checked and ok' if the Lambda exists.
+ Returns a status of 'error' if the Lambda does not exist.
+
+        >>> account_id = "<AWS_ACCOUNT_ID>"
+        >>> lambda_arn = "<LAMBDA_ARN>"
+
+ >>> api.AwsLogsIntegration.check_lambda(account_id=account_id, lambda_arn=lambda_arn)
+ """
+ cls._sub_resource_name = "logs/check_async"
+ return super(AwsLogsIntegration, cls).add_items(id=cls._resource_id, **params)
+
+ @classmethod
+ def check_services(cls, **params):
+ """
+        Test if permissions are present to add log-forwarding triggers for the \
+        given services and AWS account. Input is the same as for save_services. \
+        This is done asynchronously, so it can be polled repeatedly in a non-blocking \
+        fashion until the async request completes.
+
+        >>> account_id = "<AWS_ACCOUNT_ID>"
+        >>> services = ["s3", "elb", "elbv2", "cloudfront", "redshift", "lambda"]
+
+        >>> api.AwsLogsIntegration.check_services(account_id=account_id, services=services)
+ """
+ cls._sub_resource_name = "logs/services_async"
+ return super(AwsLogsIntegration, cls).add_items(id=cls._resource_id, **params)
+
+ @classmethod
+ def list(cls, **params):
+ """
+ List all Datadog-AWS Logs integrations available in your Datadog organization.
+
+ >>> api.AwsLogsIntegration.list()
+ """
+ cls._sub_resource_name = "logs"
+ return super(AwsLogsIntegration, cls).get_items(id=cls._resource_id, **params)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/azure_integration.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/azure_integration.py
new file mode 100644
index 0000000..2bb1cea
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/azure_integration.py
@@ -0,0 +1,91 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import (
+ GetableAPIResource,
+ CreateableAPIResource,
+ DeletableAPIResource,
+ UpdatableAPIResource,
+ AddableAPISubResource,
+)
+
+
+class AzureIntegration(
+ GetableAPIResource, CreateableAPIResource, DeletableAPIResource, UpdatableAPIResource, AddableAPISubResource
+):
+ """
+ A wrapper around Azure integration API.
+ """
+
+ _resource_name = "integration"
+ _resource_id = "azure"
+
+ @classmethod
+ def list(cls, **params):
+ """
+ List all Datadog-Azure integrations available in your Datadog organization.
+
+ >>> api.AzureIntegration.list()
+ """
+ return super(AzureIntegration, cls).get(id=cls._resource_id, **params)
+
+ @classmethod
+ def create(cls, **params):
+ """
+ Add a new Azure integration config.
+
+        >>> tenant_name = "<AZURE_TENANT_NAME>"
+        >>> client_id = "<AZURE_CLIENT_ID>"
+        >>> client_secret = "<AZURE_CLIENT_SECRET>"
+        >>> host_filters = ["<KEY>:<VALUE>"]
+
+ >>> api.AzureIntegration.create(tenant_name=tenant_name, client_id=client_id, \
+ client_secret=client_secret,host_filters=host_filters)
+ """
+ return super(AzureIntegration, cls).create(id=cls._resource_id, **params)
+
+ @classmethod
+ def delete(cls, **body):
+ """
+ Delete a given Datadog-Azure integration.
+
+        >>> tenant_name = "<AZURE_TENANT_NAME>"
+        >>> client_id = "<AZURE_CLIENT_ID>"
+
+ >>> api.AzureIntegration.delete(tenant_name=tenant_name, client_id=client_id)
+ """
+ return super(AzureIntegration, cls).delete(id=cls._resource_id, body=body)
+
+ @classmethod
+ def update_host_filters(cls, **params):
+ """
+        Update the defined list of host filters for a given Datadog-Azure integration.
+
+        >>> tenant_name = "<AZURE_TENANT_NAME>"
+        >>> client_id = "<AZURE_CLIENT_ID>"
+        >>> host_filters = "<KEY>:<VALUE>,<KEY>:<VALUE>"
+
+ >>> api.AzureIntegration.update_host_filters(tenant_name=tenant_name, client_id=client_id, \
+ host_filters=host_filters)
+ """
+ cls._sub_resource_name = "host_filters"
+ return super(AzureIntegration, cls).add_items(id=cls._resource_id, **params)
+
+ @classmethod
+ def update(cls, **body):
+ """
+ Update an Azure account configuration.
+
+        >>> tenant_name = "<AZURE_TENANT_NAME>"
+        >>> client_id = "<AZURE_CLIENT_ID>"
+        >>> new_tenant_name = "<NEW_AZURE_TENANT_NAME>"
+        >>> new_client_id = "<NEW_AZURE_CLIENT_ID>"
+        >>> client_secret = "<AZURE_CLIENT_SECRET>"
+        >>> host_filters = "<KEY>:<VALUE>,<KEY>:<VALUE>"
+
+ >>> api.AzureIntegration.update(tenant_name=tenant_name, client_id=client_id, \
+ new_tenant_name=new_tenant_name, new_client_id=new_client_id,\
+ client_secret=client_secret, host_filters=host_filters)
+ """
+ params = {}
+ return super(AzureIntegration, cls).update(id=cls._resource_id, params=params, **body)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/comments.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/comments.py
new file mode 100644
index 0000000..7ecd506
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/comments.py
@@ -0,0 +1,12 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import CreateableAPIResource, UpdatableAPIResource
+
+
+class Comment(CreateableAPIResource, UpdatableAPIResource):
+ """
+ A wrapper around Comment HTTP API.
+ """
+
+ _resource_name = "comments"
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/constants.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/constants.py
new file mode 100644
index 0000000..a7e02b8
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/constants.py
@@ -0,0 +1,25 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+
+
+class CheckStatus(object):
+ OK = 0
+ WARNING = 1
+ CRITICAL = 2
+ UNKNOWN = 3
+ ALL = (OK, WARNING, CRITICAL, UNKNOWN)
+
+
+class MonitorType(object):
+ # From https://docs.datadoghq.com/api/?lang=bash#create-a-monitor
+ QUERY_ALERT = "query alert"
+ COMPOSITE = "composite"
+ SERVICE_CHECK = "service check"
+ PROCESS_ALERT = "process alert"
+ LOG_ALERT = "log alert"
+ METRIC_ALERT = "metric alert"
+ RUM_ALERT = "rum alert"
+ EVENT_ALERT = "event alert"
+ SYNTHETICS_ALERT = "synthetics alert"
+ TRACE_ANALYTICS = "trace-analytics alert"
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboard_list_v2.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboard_list_v2.py
new file mode 100644
index 0000000..127fca9
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboard_list_v2.py
@@ -0,0 +1,19 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import (
+ AddableAPISubResource,
+ DeletableAPISubResource,
+ ListableAPISubResource,
+ UpdatableAPISubResource,
+)
+
+
+class DashboardListV2(ListableAPISubResource, AddableAPISubResource, UpdatableAPISubResource, DeletableAPISubResource):
+ """
+ A wrapper around Dashboard List HTTP API.
+ """
+
+ _resource_name = "dashboard/lists/manual"
+ _sub_resource_name = "dashboards"
+ _api_version = "v2"
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboard_lists.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboard_lists.py
new file mode 100644
index 0000000..e83785f
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboard_lists.py
@@ -0,0 +1,39 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import (
+ AddableAPISubResource,
+ CreateableAPIResource,
+ DeletableAPIResource,
+ DeletableAPISubResource,
+ GetableAPIResource,
+ ListableAPIResource,
+ ListableAPISubResource,
+ UpdatableAPIResource,
+ UpdatableAPISubResource,
+)
+
+from datadog.api.dashboard_list_v2 import DashboardListV2
+
+
+class DashboardList(
+ AddableAPISubResource,
+ CreateableAPIResource,
+ DeletableAPIResource,
+ DeletableAPISubResource,
+ GetableAPIResource,
+ ListableAPIResource,
+ ListableAPISubResource,
+ UpdatableAPIResource,
+ UpdatableAPISubResource,
+):
+ """
+ A wrapper around Dashboard List HTTP API.
+ """
+
+ _resource_name = "dashboard/lists/manual"
+ _sub_resource_name = "dashboards"
+
+ # Support for new API version (api.DashboardList.v2)
+ # Note: This needs to be removed after complete migration of these endpoints from v1 to v2.
+ v2 = DashboardListV2()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboards.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboards.py
new file mode 100644
index 0000000..dab9b4d
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/dashboards.py
@@ -0,0 +1,20 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import (
+ GetableAPIResource,
+ CreateableAPIResource,
+ UpdatableAPIResource,
+ DeletableAPIResource,
+ ListableAPIResource,
+)
+
+
+class Dashboard(
+ GetableAPIResource, CreateableAPIResource, UpdatableAPIResource, DeletableAPIResource, ListableAPIResource
+):
+ """
+ A wrapper around Dashboard HTTP API.
+ """
+
+ _resource_name = "dashboard"
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/distributions.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/distributions.py
new file mode 100644
index 0000000..918f7d8
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/distributions.py
@@ -0,0 +1,45 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# datadog
+from datadog.api.format import format_points
+from datadog.api.resources import SendableAPIResource
+
+
+class Distribution(SendableAPIResource):
+ """A wrapper around Distribution HTTP API"""
+
+ _resource_name = "distribution_points"
+
+ @classmethod
+ def send(cls, distributions=None, attach_host_name=True, compress_payload=False, **distribution):
+ """
+ Submit a distribution metric or a list of distribution metrics to the distribution metric
+ API
+
+ :param compress_payload: compress the payload using zlib
+ :type compress_payload: bool
+ :param metric: the name of the time series
+ :type metric: string
+ :param points: a (timestamp, [list of values]) pair or
+ list of (timestamp, [list of values]) pairs
+ :type points: list
+ :param host: host name that produced the metric
+ :type host: string
+ :param tags: list of tags associated with the metric.
+ :type tags: string list
+ :returns: Dictionary representing the API's JSON response
+ """
+ if distributions:
+ # Multiple distributions are sent
+ for d in distributions:
+ if isinstance(d, dict):
+ d["points"] = format_points(d["points"])
+ series_dict = {"series": distributions}
+ else:
+ # One distribution is sent
+ distribution["points"] = format_points(distribution["points"])
+ series_dict = {"series": [distribution]}
+ return super(Distribution, cls).send(
+ attach_host_name=attach_host_name, compress_payload=compress_payload, **series_dict
+ )
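+
+# Usage sketch (illustrative only; metric name and tags are placeholders). Note the
+# distribution-specific point shape documented above: (timestamp, [list of values]).
+#
+#     import time
+#     from datadog import api
+#     api.Distribution.send(
+#         metric="example.request.latency",
+#         points=[(int(time.time()), [0.12, 0.34, 0.56])],
+#         tags=["env:dev"],
+#     )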
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/downtimes.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/downtimes.py
new file mode 100644
index 0000000..567ed9e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/downtimes.py
@@ -0,0 +1,38 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import (
+ GetableAPIResource,
+ CreateableAPIResource,
+ UpdatableAPIResource,
+ ListableAPIResource,
+ DeletableAPIResource,
+ ActionAPIResource,
+)
+
+
+class Downtime(
+ GetableAPIResource,
+ CreateableAPIResource,
+ UpdatableAPIResource,
+ ListableAPIResource,
+ DeletableAPIResource,
+ ActionAPIResource,
+):
+ """
+    A wrapper around the Monitor Downtime HTTP API.
+ """
+
+ _resource_name = "downtime"
+
+ @classmethod
+ def cancel_downtime_by_scope(cls, **body):
+ """
+ Cancels all downtimes matching the scope.
+
+ :param scope: scope to cancel downtimes by
+ :type scope: string
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ return super(Downtime, cls)._trigger_class_action("POST", "cancel/by_scope", **body)
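+
+# Usage sketch (illustrative only; the scope value is a placeholder):
+#
+#     from datadog import api
+#     api.Downtime.create(scope="env:staging", message="deploy window")
+#     api.Downtime.cancel_downtime_by_scope(scope="env:staging")  # cancels all matching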
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/events.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/events.py
new file mode 100644
index 0000000..55b176f
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/events.py
@@ -0,0 +1,95 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.exceptions import ApiError
+from datadog.api.resources import GetableAPIResource, CreateableAPIResource, SearchableAPIResource
+from datadog.util.compat import iteritems
+
+
+class Event(GetableAPIResource, CreateableAPIResource, SearchableAPIResource):
+ """
+ A wrapper around Event HTTP API.
+ """
+
+ _resource_name = "events"
+ _timestamp_keys = set(["start", "end"])
+
+ @classmethod
+ def create(cls, attach_host_name=True, **params):
+ """
+ Post an event.
+
+ :param title: title for the new event
+ :type title: string
+
+ :param text: event message
+ :type text: string
+
+ :param aggregation_key: key by which to group events in event stream
+ :type aggregation_key: string
+
+ :param alert_type: "error", "warning", "info" or "success".
+ :type alert_type: string
+
+ :param date_happened: when the event occurred. if unset defaults to the current time. \
+ (POSIX timestamp)
+ :type date_happened: integer
+
+ :param handle: user to post the event as. defaults to owner of the application key used \
+ to submit.
+ :type handle: string
+
+ :param priority: priority to post the event as. ("normal" or "low", defaults to "normal")
+ :type priority: string
+
+ :param related_event_id: post event as a child of the given event
+ :type related_event_id: id
+
+ :param tags: tags to post the event with
+ :type tags: list of strings
+
+ :param host: host to post the event with
+ :type host: string
+
+ :param device_name: device_name to post the event with
+ :type device_name: list of strings
+
+ :returns: Dictionary representing the API's JSON response
+
+ >>> title = "Something big happened!"
+ >>> text = 'And let me tell you all about it here!'
+ >>> tags = ['version:1', 'application:web']
+
+ >>> api.Event.create(title=title, text=text, tags=tags)
+ """
+ if params.get("alert_type"):
+ if params["alert_type"] not in ["error", "warning", "info", "success"]:
+ raise ApiError("Parameter alert_type must be either error, warning, info or success")
+
+ return super(Event, cls).create(attach_host_name=attach_host_name, **params)
+
+ @classmethod
+ def query(cls, **params):
+ """
+ Get the events that occurred between the *start* and *end* POSIX timestamps,
+ optional filtered by *priority* ("low" or "normal"), *sources* and
+ *tags*.
+
+        See the event API documentation for the
+        event data format.
+
+ :returns: Dictionary representing the API's JSON response
+
+ >>> api.Event.query(start=1313769783, end=1419436870, priority="normal", \
+ tags=["application:web"])
+ """
+
+ def timestamp_to_integer(k, v):
+ if k in cls._timestamp_keys:
+ return int(v)
+ else:
+ return v
+
+ params = dict((k, timestamp_to_integer(k, v)) for k, v in iteritems(params))
+
+ return super(Event, cls)._search(**params)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/exceptions.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/exceptions.py
new file mode 100644
index 0000000..afdfa36
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/exceptions.py
@@ -0,0 +1,105 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+"""
+API & HTTP Clients exceptions.
+"""
+
+
+class DatadogException(Exception):
+ """
+ Base class for Datadog API exceptions. Use this for patterns like the following:
+
+ try:
+ # do something with the Datadog API
+ except datadog.api.exceptions.DatadogException:
+ # handle any Datadog-specific exceptions
+ """
+
+
+class ProxyError(DatadogException):
+ """
+ HTTP connection to the configured proxy server failed.
+ """
+
+ def __init__(self, method, url, exception):
+ message = (
+ u"Could not request {method} {url}: Unable to connect to proxy. "
+ u"Please check the proxy configuration and try again.".format(method=method, url=url)
+ )
+ super(ProxyError, self).__init__(message)
+
+
+class ClientError(DatadogException):
+ """
+ HTTP connection to Datadog endpoint is not possible.
+ """
+
+ def __init__(self, method, url, exception):
+ message = (
+ u"Could not request {method} {url}: {exception}. "
+ u"Please check the network connection or try again later. "
+ u"If the problem persists, please contact support@datadoghq.com".format(
+ method=method, url=url, exception=exception
+ )
+ )
+ super(ClientError, self).__init__(message)
+
+
+class HttpTimeout(DatadogException):
+ """
+ HTTP connection timeout.
+ """
+
+ def __init__(self, method, url, timeout):
+ message = (
+ u"{method} {url} timed out after {timeout}. "
+ u"Please try again later. "
+ u"If the problem persists, please contact support@datadoghq.com".format(
+ method=method, url=url, timeout=timeout
+ )
+ )
+ super(HttpTimeout, self).__init__(message)
+
+
+class HttpBackoff(DatadogException):
+ """
+ Backing off after too many timeouts.
+ """
+
+ def __init__(self, backoff_period):
+ message = u"Too many timeouts. Won't try again for {backoff_period} seconds. ".format(
+ backoff_period=backoff_period
+ )
+ super(HttpBackoff, self).__init__(message)
+
+
+class HTTPError(DatadogException):
+ """
+ Datadog returned a HTTP error.
+ """
+
+ def __init__(self, status_code=None, reason=None):
+ reason = u" - {reason}".format(reason=reason) if reason else u""
+ message = (
+ u"Datadog returned a bad HTTP response code: {status_code}{reason}. "
+ u"Please try again later. "
+ u"If the problem persists, please contact support@datadoghq.com".format(
+ status_code=status_code,
+ reason=reason,
+ )
+ )
+
+ super(HTTPError, self).__init__(message)
+
+
+class ApiError(DatadogException):
+ """
+ Datadog returned an API error (known HTTPError).
+
+ Matches the following status codes: 400, 401, 403, 404, 409, 429.
+ """
+
+
+class ApiNotInitialized(DatadogException):
+ "No API key is set"
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/format.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/format.py
new file mode 100644
index 0000000..d3e5b72
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/format.py
@@ -0,0 +1,44 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from numbers import Number
+import sys
+import time
+
+if sys.version_info[0] >= 3:
+ from collections.abc import Iterable
+else:
+ from collections import Iterable
+
+
+def format_points(points):
+ """
+ Format `points` parameter.
+
+ Input:
+ a value or (timestamp, value) pair or a list of value or (timestamp, value) pairs
+
+ Returns:
+ list of (timestamp, float value) pairs
+
+ """
+ now = time.time()
+ if not isinstance(points, list):
+ points = [points]
+
+ formatted_points = []
+ for point in points:
+ if isinstance(point, Number):
+ timestamp = now
+ value = float(point)
+ # Distributions contain a list of points
+ else:
+ timestamp = point[0]
+ if isinstance(point[1], Iterable):
+ value = [float(p) for p in point[1]]
+ else:
+ value = float(point[1])
+
+ formatted_points.append((timestamp, value))
+
+ return formatted_points
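+
+# Behaviour sketch (illustrative; the timestamps shown are placeholders):
+#
+#     format_points(5)                      # -> [(<now>, 5.0)]
+#     format_points([(1609459200, 2)])      # -> [(1609459200, 2.0)]
+#     format_points((1609459200, [1, 2]))   # -> [(1609459200, [1.0, 2.0])]  (distribution)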
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/gcp_integration.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/gcp_integration.py
new file mode 100644
index 0000000..978e1ae
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/gcp_integration.py
@@ -0,0 +1,93 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import GetableAPIResource, CreateableAPIResource, DeletableAPIResource, UpdatableAPIResource
+
+
+class GcpIntegration(GetableAPIResource, CreateableAPIResource, DeletableAPIResource, UpdatableAPIResource):
+ """
+ A wrapper around GCP integration API.
+ """
+
+ _resource_name = "integration"
+ _resource_id = "gcp"
+
+ @classmethod
+ def list(cls, **params):
+ """
+ List all Datadog-Gcp integrations available in your Datadog organization.
+
+ >>> api.GcpIntegration.list()
+ """
+ return super(GcpIntegration, cls).get(id=cls._resource_id, **params)
+
+ @classmethod
+ def delete(cls, **body):
+ """
+ Delete a given Datadog-GCP integration.
+
+ >>> project_id=""
+ >>> client_email=""
+
+ >>> api.GcpIntegration.delete(project_id=project_id, client_email=client_email)
+ """
+ return super(GcpIntegration, cls).delete(id=cls._resource_id, body=body)
+
+ @classmethod
+ def create(cls, **params):
+ """
+ Add a new GCP integration config.
+
+ All of the following fields values are provided by the \
+ JSON service account key file created in the GCP Console \
+ for service accounts; Refer to the Datadog-Google Cloud \
+ Platform integration installation instructions to see how \
+ to generate one for your organization. For further references, \
+ consult the Google Cloud service account documentation.
+
+ >>> type="service_account"
+ >>> project_id=""
+ >>> private_key_id=""
+ >>> private_key=""
+ >>> client_email=""
+ >>> client_id=""
+ >>> auth_uri=">> token_uri=""
+ >>> auth_provider_x509_cert_url=""
+ >>> client_x509_cert_url=""
+ >>> host_filters=":,:"
+
+ >>> api.GcpIntegration.create(type=type, project_id=project_id, \
+ private_key_id=private_key_id,private_key=private_key, \
+ client_email=client_email, client_id=client_id, \
+ auth_uri=auth_uri, token_uri=token_uri, \
+ auth_provider_x509_cert_url=auth_provider_x509_cert_url, \
+ client_x509_cert_url=client_x509_cert_url, host_filters=host_filters)
+ """
+ return super(GcpIntegration, cls).create(id=cls._resource_id, **params)
+
+ @classmethod
+ def update(cls, **body):
+ """
+ Update an existing service account partially (one or multiple fields), \
+ by supplying a new value for the field(s) to be updated.
+
+ `project_id` and `client_email` are required, in order to identify the \
+ right service account to update. \
+ The unspecified fields will keep their original values.
+
+ The only use case for updating this integration is to change \
+ host filtering and automute settings. Otherwise, an entirely \
+ new integration config is needed.
+
+ >>> project_id=""
+ >>> client_email=""
+ >>> host_filters=""
+ >>> automute=true #boolean
+
+ >>> api.GcpIntegration.update(project_id=project_id, \
+ client_email=client_email, host_filters=host_filters, \
+ automute=automute)
+ """
+ params = {}
+ return super(GcpIntegration, cls).update(id=cls._resource_id, params=params, **body)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/graphs.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/graphs.py
new file mode 100644
index 0000000..ef29d70
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/graphs.py
@@ -0,0 +1,84 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.util.compat import urlparse
+from datadog.api.resources import CreateableAPIResource, ActionAPIResource, GetableAPIResource, ListableAPIResource
+
+
+class Graph(CreateableAPIResource, ActionAPIResource):
+ """
+ A wrapper around Graph HTTP API.
+ """
+
+ _resource_name = "graph/snapshot"
+
+ @classmethod
+ def create(cls, **params):
+ """
+ Take a snapshot of a graph, returning the full url to the snapshot.
+
+ :param metric_query: metric query
+ :type metric_query: string query
+
+ :param start: query start timestamp
+ :type start: POSIX timestamp
+
+ :param end: query end timestamp
+ :type end: POSIX timestamp
+
+ :param event_query: a query that will add event bands to the graph
+ :type event_query: string query
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ return super(Graph, cls).create(method="GET", **params)
+
+ @classmethod
+ def status(cls, snapshot_url):
+ """
+        Returns the status code of a snapshot. Can be used to know when the
+ snapshot is ready for download.
+
+ :param snapshot_url: snapshot URL to check
+ :type snapshot_url: string url
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ snap_path = urlparse(snapshot_url).path
+ snap_path = snap_path.split("/snapshot/view/")[1].split(".png")[0]
+
+ snapshot_status_url = "graph/snapshot_status/{0}".format(snap_path)
+
+ return super(Graph, cls)._trigger_action("GET", snapshot_status_url)
+
+
+class Embed(ListableAPIResource, GetableAPIResource, ActionAPIResource, CreateableAPIResource):
+ """
+ A wrapper around Embed HTTP API.
+ """
+
+ _resource_name = "graph/embed"
+
+ @classmethod
+ def enable(cls, embed_id):
+ """
+ Enable a specified embed.
+
+ :param embed_id: embed token
+ :type embed_id: string embed token
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ return super(Embed, cls)._trigger_class_action("GET", id=embed_id, action_name="enable")
+
+ @classmethod
+ def revoke(cls, embed_id):
+ """
+ Revoke a specified embed.
+
+ :param embed_id: embed token
+ :type embed_id: string embed token
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ return super(Embed, cls)._trigger_class_action("GET", id=embed_id, action_name="revoke")
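+
+# Usage sketch (illustrative only; query and timestamps are placeholders, and the
+# snapshot response is assumed to carry a `snapshot_url` field per the v1 API):
+#
+#     import time
+#     from datadog import api
+#     snap = api.Graph.create(
+#         metric_query="avg:system.load.1{*}",
+#         start=int(time.time()) - 3600,
+#         end=int(time.time()),
+#     )
+#     api.Graph.status(snap["snapshot_url"])   # poll until the PNG is ready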
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/hosts.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/hosts.py
new file mode 100644
index 0000000..5bc2a32
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/hosts.py
@@ -0,0 +1,91 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import ActionAPIResource, SearchableAPIResource
+
+
+class Host(ActionAPIResource):
+ """
+ A wrapper around Host HTTP API.
+ """
+
+ _resource_name = "host"
+
+ @classmethod
+ def mute(cls, host_name, **body):
+ """
+ Mute a host.
+
+ :param host_name: hostname
+ :type host_name: string
+
+ :param end: timestamp to end muting
+ :type end: POSIX timestamp
+
+ :param override: if true and the host is already muted, will override\
+ existing end on the host
+ :type override: bool
+
+ :param message: message to associate with the muting of this host
+ :type message: string
+
+ :returns: Dictionary representing the API's JSON response
+
+ """
+ return super(Host, cls)._trigger_class_action("POST", "mute", host_name, **body)
+
+ @classmethod
+ def unmute(cls, host_name):
+ """
+ Unmute a host.
+
+ :param host_name: hostname
+ :type host_name: string
+
+ :returns: Dictionary representing the API's JSON response
+
+ """
+ return super(Host, cls)._trigger_class_action("POST", "unmute", host_name)
+
+
+class Hosts(ActionAPIResource, SearchableAPIResource):
+ """
+ A wrapper around Hosts HTTP API.
+ """
+
+ _resource_name = "hosts"
+
+ @classmethod
+ def search(cls, **params):
+ """
+        Search among hosts that have been live within the past 2 hours.
+        Returns at most 100 results at a time.
+
+ :param filter: query to filter search results
+ :type filter: string
+
+ :param sort_field: "status", "apps", "cpu", "iowait", or "load"
+ :type sort_field: string
+
+ :param sort_dir: "asc" or "desc"
+ :type sort_dir: string
+
+ :param start: host result to start at
+ :type start: integer
+
+ :param count: number of host results to return
+ :type count: integer
+
+        :returns: Dictionary representing the API's JSON response
+
+ """
+ return super(Hosts, cls)._search(**params)
+
+ @classmethod
+ def totals(cls):
+ """
+ Get total number of hosts active and up.
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ return super(Hosts, cls)._trigger_class_action("GET", "totals")
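+
+# Usage sketch (illustrative only; host name and filter values are placeholders):
+#
+#     import time
+#     from datadog import api
+#     api.Host.mute("web-01.example.com", message="maintenance", end=int(time.time()) + 3600)
+#     api.Host.unmute("web-01.example.com")
+#     api.Hosts.search(filter="env:prod", sort_field="cpu", count=10)
+#     api.Hosts.totals()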
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/http_client.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/http_client.py
new file mode 100644
index 0000000..f058393
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/http_client.py
@@ -0,0 +1,195 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+"""
+Available HTTP clients for the Datadog API client.
+
+Priority:
+1. `requests` 3p module
+2. `urlfetch` 3p module - Google App Engine only
+"""
+# stdlib
+import copy
+import logging
+import platform
+import urllib
+from threading import Lock
+
+# 3p
+try:
+ import requests
+ import requests.adapters
+except ImportError:
+ requests = None # type: ignore
+
+try:
+ from google.appengine.api import urlfetch, urlfetch_errors
+except ImportError:
+ urlfetch, urlfetch_errors = None, None
+
+# datadog
+from datadog.api.exceptions import ProxyError, ClientError, HTTPError, HttpTimeout
+
+
+log = logging.getLogger("datadog.api")
+
+
+def _get_user_agent_header():
+ from datadog import version
+
+ return "datadogpy/{version} (python {pyver}; os {os}; arch {arch})".format(
+ version=version.__version__,
+ pyver=platform.python_version(),
+ os=platform.system().lower(),
+ arch=platform.machine().lower(),
+ )
+
+
+def _remove_context(exc):
+ """Python3: remove context from chained exceptions to prevent leaking API keys in tracebacks."""
+ exc.__cause__ = None
+ return exc
+
+
+class HTTPClient(object):
+ """
+    An abstract generic HTTP client. Subclasses must implement the `request` method.
+ """
+
+ @classmethod
+ def request(cls, method, url, headers, params, data, timeout, proxies, verify, max_retries):
+ """
+ Main method to be implemented by HTTP clients.
+
+ The returned data structure has the following fields:
+ * `content`: string containing the response from the server
+ * `status_code`: HTTP status code returned by the server
+
+ Can raise the following exceptions:
+ * `ClientError`: server cannot be contacted
+ * `HttpTimeout`: connection timed out
+ * `HTTPError`: unexpected HTTP response code
+ """
+ raise NotImplementedError(u"Must be implemented by HTTPClient subclasses.")
+
+
+class RequestClient(HTTPClient):
+ """
+ HTTP client based on 3rd party `requests` module, using a single session.
+ This allows us to keep the session alive to spare some execution time.
+ """
+
+ _session = None
+ _session_lock = Lock()
+
+ @classmethod
+ def request(cls, method, url, headers, params, data, timeout, proxies, verify, max_retries):
+ try:
+
+ with cls._session_lock:
+ if cls._session is None:
+ cls._session = requests.Session()
+ http_adapter = requests.adapters.HTTPAdapter(max_retries=max_retries)
+ cls._session.mount("https://", http_adapter)
+ cls._session.headers.update({"User-Agent": _get_user_agent_header()})
+
+ result = cls._session.request(
+ method, url, headers=headers, params=params, data=data, timeout=timeout, proxies=proxies, verify=verify
+ )
+
+ result.raise_for_status()
+
+ except requests.exceptions.ProxyError as e:
+ raise _remove_context(ProxyError(method, url, e))
+ except requests.ConnectionError as e:
+ raise _remove_context(ClientError(method, url, e))
+ except requests.exceptions.Timeout:
+ raise _remove_context(HttpTimeout(method, url, timeout))
+ except requests.exceptions.HTTPError as e:
+ if e.response.status_code in (400, 401, 403, 404, 409, 429):
+ # This gets caught afterwards and raises an ApiError exception
+ pass
+ else:
+ raise _remove_context(HTTPError(e.response.status_code, result.reason))
+ except TypeError:
+ raise TypeError(
+ u"Your installed version of `requests` library seems not compatible with"
+ u"Datadog's usage. We recommend upgrading it ('pip install -U requests')."
+ u"If you need help or have any question, please contact support@datadoghq.com"
+ )
+
+ return result
+
+
+class URLFetchClient(HTTPClient):
+ """
+ HTTP client based on Google App Engine `urlfetch` module.
+ """
+
+ @classmethod
+ def request(cls, method, url, headers, params, data, timeout, proxies, verify, max_retries):
+ """
+ Wrapper around `urlfetch.fetch` method.
+
+ TO IMPLEMENT:
+ * `max_retries`
+ """
+ # No local certificate file can be used on Google App Engine
+ validate_certificate = True if verify else False
+
+ # Encode parameters in the url
+ url_with_params = "{url}?{params}".format(url=url, params=urllib.urlencode(params))
+ newheaders = copy.deepcopy(headers)
+ newheaders["User-Agent"] = _get_user_agent_header()
+
+ try:
+ result = urlfetch.fetch(
+ url=url_with_params,
+ method=method,
+ headers=newheaders,
+ validate_certificate=validate_certificate,
+ deadline=timeout,
+ payload=data,
+ # setting follow_redirects=False may be slightly faster:
+ # https://cloud.google.com/appengine/docs/python/microservice-performance#use_the_shortest_route
+ follow_redirects=False,
+ )
+
+ cls.raise_on_status(result)
+
+ except urlfetch.DownloadError as e:
+ raise ClientError(method, url, e)
+ except urlfetch_errors.DeadlineExceededError:
+ raise HttpTimeout(method, url, timeout)
+
+ return result
+
+ @classmethod
+ def raise_on_status(cls, result):
+ """
+ Raise on HTTP status code errors.
+ """
+ status_code = result.status_code
+
+        if (status_code // 100) != 2:  # floor division, so 2xx passes on both Python 2 and 3
+ if status_code in (400, 401, 403, 404, 409, 429):
+ pass
+ else:
+ raise HTTPError(status_code)
+
+
+def resolve_http_client():
+ """
+    Resolve an appropriate HTTP client based on the defined priority and user environment.
+ """
+ if requests:
+ log.debug(u"Use `requests` based HTTP client.")
+ return RequestClient
+
+ if urlfetch and urlfetch_errors:
+ log.debug(u"Use `urlfetch` based HTTP client.")
+ return URLFetchClient
+
+ raise ImportError(
+ u"Datadog API client was unable to resolve a HTTP client. " u" Please install `requests` library."
+ )
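+
+# Usage sketch (illustrative only; the API client below normally drives this, and the
+# URL/header values here are placeholders against the public key-validation endpoint):
+#
+#     client = resolve_http_client()   # RequestClient when `requests` is importable
+#     result = client.request(
+#         "GET", "https://api.datadoghq.com/api/v1/validate",
+#         headers={"DD-API-KEY": "<api_key>"}, params={}, data=None,
+#         timeout=60, proxies=None, verify=True, max_retries=3,
+#     )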
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/infrastructure.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/infrastructure.py
new file mode 100644
index 0000000..806a051
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/infrastructure.py
@@ -0,0 +1,28 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import SearchableAPIResource
+
+
+class Infrastructure(SearchableAPIResource):
+ """
+ A wrapper around Infrastructure HTTP API.
+ """
+
+ _resource_name = "search"
+
+ @classmethod
+ def search(cls, **params):
+ """
+ Search for entities in Datadog.
+
+ :param q: a query to search for host and metrics
+ :type q: string query
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ # Deprecate the hosts search param
+ query = params.get("q", "").split(":")
+ if len(query) > 1 and query[0] == "hosts":
+ print("[DEPRECATION] Infrastructure.search() is deprecated for ", "hosts. Use `Hosts.search` instead.")
+ return super(Infrastructure, cls)._search(**params)
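+
+# Usage sketch (illustrative only; the query string is a placeholder):
+#
+#     from datadog import api
+#     api.Infrastructure.search(q="metrics:system.cpu")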
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/logs.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/logs.py
new file mode 100644
index 0000000..a87efa2
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/logs.py
@@ -0,0 +1,22 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import CreateableAPIResource
+from datadog.api.api_client import APIClient
+
+
+class Logs(CreateableAPIResource):
+ """
+ A wrapper around Log HTTP API.
+ """
+
+ _resource_name = "logs-queries"
+
+ @classmethod
+ def list(cls, data):
+ path = "{resource_name}/list".format(
+ resource_name=cls._resource_name,
+ )
+ api_version = getattr(cls, "_api_version", None)
+
+ return APIClient.submit("POST", path, api_version, data)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/metadata.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/metadata.py
new file mode 100644
index 0000000..6c251e5
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/metadata.py
@@ -0,0 +1,64 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# datadog
+from datadog.api.resources import GetableAPIResource, UpdatableAPIResource
+
+
+class Metadata(GetableAPIResource, UpdatableAPIResource):
+ """
+ A wrapper around Metric Metadata HTTP API
+ """
+
+ _resource_name = "metrics"
+
+ @classmethod
+ def get(cls, metric_name):
+ """
+ Get metadata information on an existing Datadog metric
+
+        :param metric_name: metric name (ex. system.cpu.idle)
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ if not metric_name:
+ raise KeyError("'metric_name' parameter is required")
+
+ return super(Metadata, cls).get(metric_name)
+
+ @classmethod
+ def update(cls, metric_name, **params):
+ """
+ Update metadata fields for an existing Datadog metric.
+ If the metadata does not exist for the metric it is created by
+ the update.
+
+ :param type: type of metric (ex. "gauge", "rate", etc.)
+ see http://docs.datadoghq.com/metrictypes/
+ :type type: string
+
+ :param description: description of the metric
+ :type description: string
+
+ :param short_name: short name of the metric
+ :type short_name: string
+
+ :param unit: unit type associated with the metric (ex. "byte", "operation")
+ see http://docs.datadoghq.com/units/ for full list
+ :type unit: string
+
+ :param per_unit: per unit type (ex. "second" as in "queries per second")
+ see http://docs.datadoghq.com/units/ for full list
+ :type per_unit: string
+
+ :param statsd_interval: statsd flush interval for metric in seconds (if applicable)
+ :type statsd_interval: integer
+
+ :returns: Dictionary representing the API's JSON response
+
+ >>> api.Metadata.update(metric_name='api.requests.served', metric_type="counter")
+ """
+ if not metric_name:
+ raise KeyError("'metric_name' parameter is required")
+
+ return super(Metadata, cls).update(id=metric_name, **params)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/metrics.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/metrics.py
new file mode 100644
index 0000000..252ea88
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/metrics.py
@@ -0,0 +1,147 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# datadog
+from datadog.api.exceptions import ApiError
+from datadog.api.format import format_points
+from datadog.api.resources import SearchableAPIResource, SendableAPIResource, ListableAPIResource
+
+
+class Metric(SearchableAPIResource, SendableAPIResource, ListableAPIResource):
+ """
+ A wrapper around Metric HTTP API
+ """
+
+ _resource_name = None
+
+ _METRIC_QUERY_ENDPOINT = "query"
+ _METRIC_SUBMIT_ENDPOINT = "series"
+ _METRIC_LIST_ENDPOINT = "metrics"
+
+ @classmethod
+ def list(cls, from_epoch):
+ """
+        Get a list of active metrics since a given time (Unix epoch)
+
+        :param from_epoch: Start time in Unix epoch (seconds)
+
+ :returns: Dictionary containing a list of active metrics
+ """
+
+ cls._resource_name = cls._METRIC_LIST_ENDPOINT
+
+ try:
+ seconds = int(from_epoch)
+ params = {"from": seconds}
+ except ValueError:
+ raise ApiError("Parameter 'from_epoch' must be an integer")
+
+ return super(Metric, cls).get_all(**params)
+
+ @staticmethod
+ def _rename_metric_type(metric):
+ """
+ FIXME DROPME in 1.0:
+
+ API documentation was illegitimately promoting usage of `metric_type` parameter
+ instead of `type`.
+ To be consistent and avoid 'backward incompatibilities', properly rename this parameter.
+ """
+ if "metric_type" in metric:
+ metric["type"] = metric.pop("metric_type")
+
+ @classmethod
+ def send(cls, metrics=None, attach_host_name=True, compress_payload=False, **single_metric):
+ """
+ Submit a metric or a list of metrics to the metric API
+        A metric dictionary should consist of 5 keys: metric, points, host, tags, type (some of which are optional),
+ see below:
+
+ :param metric: the name of the time series
+ :type metric: string
+
+ :param compress_payload: compress the payload using zlib
+ :type compress_payload: bool
+
+ :param metrics: a list of dictionaries, each item being a metric to send
+ :type metrics: list
+
+ :param points: a (timestamp, value) pair or list of (timestamp, value) pairs
+ :type points: list
+
+ :param host: host name that produced the metric
+ :type host: string
+
+ :param tags: list of tags associated with the metric.
+ :type tags: string list
+
+ :param type: type of the metric
+ :type type: 'gauge' or 'count' or 'rate' string
+
+ >>> api.Metric.send(metric='my.series', points=[(now, 15), (future_10s, 16)])
+
+ >>> metrics = [{'metric': 'my.series', 'type': 'gauge', 'points': [(now, 15), (future_10s, 16)]},
+ {'metric': 'my.series2', 'type': 'gauge', 'points': [(now, 15), (future_10s, 16)]}]
+ >>> api.Metric.send(metrics=metrics)
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ # Set the right endpoint
+ cls._resource_name = cls._METRIC_SUBMIT_ENDPOINT
+
+ # Format the payload
+ try:
+ if metrics:
+ for metric in metrics:
+ if isinstance(metric, dict):
+ cls._rename_metric_type(metric)
+ metric["points"] = format_points(metric["points"])
+ metrics_dict = {"series": metrics}
+ else:
+ cls._rename_metric_type(single_metric)
+ single_metric["points"] = format_points(single_metric["points"])
+ metrics = [single_metric]
+ metrics_dict = {"series": metrics}
+
+ except KeyError:
+ raise KeyError("'points' parameter is required")
+
+ return super(Metric, cls).send(
+ attach_host_name=attach_host_name, compress_payload=compress_payload, **metrics_dict
+ )
+
+ @classmethod
+ def query(cls, **params):
+ """
+ Query metrics from Datadog
+
+ :param start: query start timestamp
+ :type start: POSIX timestamp
+
+ :param end: query end timestamp
+ :type end: POSIX timestamp
+
+ :param query: metric query
+ :type query: string query
+
+ :returns: Dictionary representing the API's JSON response
+
+ *start* and *end* should be less than 24 hours apart.
+ It is *not* meant to retrieve metric data in bulk.
+
+ >>> api.Metric.query(start=int(time.time()) - 3600, end=int(time.time()),
+ query='avg:system.cpu.idle{*}')
+ """
+ # Set the right endpoint
+ cls._resource_name = cls._METRIC_QUERY_ENDPOINT
+
+ # `from` is a reserved keyword in Python, therefore
+ # `api.Metric.query(from=...)` is not permitted
+ # -> map `start` to `from` and `end` to `to`
+ try:
+ params["from"] = params.pop("start")
+ params["to"] = params.pop("end")
+ except KeyError as e:
+ raise ApiError("The parameter '{0}' is required".format(e.args[0]))
+
+ return super(Metric, cls)._search(**params)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/monitors.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/monitors.py
new file mode 100644
index 0000000..a2d9e74
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/monitors.py
@@ -0,0 +1,157 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import (
+ GetableAPIResource,
+ CreateableAPIResource,
+ UpdatableAPIResource,
+ ListableAPIResource,
+ DeletableAPIResource,
+ ActionAPIResource,
+)
+
+
+class Monitor(
+ GetableAPIResource,
+ CreateableAPIResource,
+ UpdatableAPIResource,
+ ListableAPIResource,
+ DeletableAPIResource,
+ ActionAPIResource,
+):
+ """
+ A wrapper around Monitor HTTP API.
+ """
+
+ _resource_name = "monitor"
+
+ @classmethod
+ def get(cls, id, **params):
+ """
+ Get monitor's details.
+
+ :param id: monitor to retrieve
+ :type id: id
+
+ :param group_states: string list indicating what, if any, group states to include
+ :type group_states: string list, strings are chosen from one or more \
+ from 'all', 'alert', 'warn', or 'no data'
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ if "group_states" in params and isinstance(params["group_states"], list):
+ params["group_states"] = ",".join(params["group_states"])
+
+ return super(Monitor, cls).get(id, **params)
+
+ @classmethod
+ def get_all(cls, **params):
+ """
+ Get all monitor details.
+
+ :param group_states: string list indicating what, if any, group states to include
+ :type group_states: string list, strings are chosen from one or more \
+ from 'all', 'alert', 'warn', or 'no data'
+
+ :param name: name to filter the list of monitors by
+ :type name: string
+
+ :param tags: tags to filter the list of monitors by scope
+ :type tags: string list
+
+ :param monitor_tags: list indicating what service and/or custom tags, if any, \
+ should be used to filter the list of monitors
+ :type monitor_tags: string list
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ for p in ["group_states", "tags", "monitor_tags"]:
+ if p in params and isinstance(params[p], list):
+ params[p] = ",".join(params[p])
+
+ return super(Monitor, cls).get_all(**params)
+
+ @classmethod
+ def mute(cls, id, **body):
+ """
+ Mute a monitor.
+
+ :param scope: scope to apply the mute
+ :type scope: string
+
+ :param end: timestamp for when the mute should end
+ :type end: POSIX timestamp
+
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ return super(Monitor, cls)._trigger_class_action("POST", "mute", id, **body)
+
+ @classmethod
+ def unmute(cls, id, **body):
+ """
+ Unmute a monitor.
+
+ :param scope: scope to apply the unmute
+ :type scope: string
+
+ :param all_scopes: if True, clears mute settings for all scopes
+ :type all_scopes: boolean
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ return super(Monitor, cls)._trigger_class_action("POST", "unmute", id, **body)
+
+ @classmethod
+ def mute_all(cls):
+ """
+ Globally mute monitors.
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ return super(Monitor, cls)._trigger_class_action("POST", "mute_all")
+
+ @classmethod
+ def unmute_all(cls):
+ """
+ Cancel global monitor mute setting (does not remove mute settings for individual monitors).
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ return super(Monitor, cls)._trigger_class_action("POST", "unmute_all")
+
+ @classmethod
+ def search(cls, **params):
+ """
+ Search monitors.
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ return super(Monitor, cls)._trigger_class_action("GET", "search", params=params)
+
+ @classmethod
+ def search_groups(cls, **params):
+ """
+ Search monitor groups.
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ return super(Monitor, cls)._trigger_class_action("GET", "groups/search", params=params)
+
+ @classmethod
+ def can_delete(cls, **params):
+ """
+ Checks if the monitors corresponding to the monitor ids can be deleted.
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ return super(Monitor, cls)._trigger_class_action("GET", "can_delete", params=params)
+
+ @classmethod
+ def validate(cls, **body):
+ """
+ Checks if the monitors definition is valid.
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ return super(Monitor, cls)._trigger_class_action("POST", "validate", **body)
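+
+# Usage sketch (illustrative only; the query/name/message values are placeholders):
+#
+#     from datadog import api
+#     monitor = api.Monitor.create(
+#         type="metric alert",
+#         query="avg(last_5m):avg:system.cpu.user{*} > 90",
+#         name="High CPU",
+#         message="CPU usage is high",
+#     )
+#     api.Monitor.mute(monitor["id"], scope="host:web-01")
+#     api.Monitor.search(query="status:alert")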
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/permissions.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/permissions.py
new file mode 100644
index 0000000..f12dad7
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/permissions.py
@@ -0,0 +1,27 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import (
+ ActionAPIResource,
+ CreateableAPIResource,
+ CustomUpdatableAPIResource,
+ DeletableAPIResource,
+ GetableAPIResource,
+ ListableAPIResource,
+)
+
+
+class Permissions(
+ ActionAPIResource,
+ CreateableAPIResource,
+ CustomUpdatableAPIResource,
+ GetableAPIResource,
+ ListableAPIResource,
+ DeletableAPIResource,
+):
+ """
+    A wrapper around the Permissions HTTP API.
+ """
+
+ _resource_name = "permissions"
+ _api_version = "v2"
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/resources.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/resources.py
new file mode 100644
index 0000000..67bcc39
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/resources.py
@@ -0,0 +1,539 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+"""
+Datadog API resources.
+"""
+
+from datadog.api.api_client import APIClient
+
+
+class CreateableAPIResource(object):
+ """
+ Creatable API Resource
+ """
+
+ @classmethod
+ def create(cls, attach_host_name=False, method="POST", id=None, params=None, **body):
+ """
+ Create a new API resource object
+
+ :param attach_host_name: link the new resource object to the host name
+ :type attach_host_name: bool
+
+ :param method: HTTP method to use to contact API endpoint
+ :type method: HTTP method string
+
+ :param id: create a new resource object as a child of the given object
+ :type id: id
+
+ :param params: new resource object source
+ :type params: dictionary
+
+ :param body: new resource object attributes
+ :type body: dictionary
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ if params is None:
+ params = {}
+
+ path = cls._resource_name
+ api_version = getattr(cls, "_api_version", None)
+
+ if method == "GET":
+ return APIClient.submit("GET", path, api_version, **body)
+ if id is None:
+ return APIClient.submit("POST", path, api_version, body, attach_host_name=attach_host_name, **params)
+
+ path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id)
+ return APIClient.submit("POST", path, api_version, body, attach_host_name=attach_host_name, **params)
+
+
+class SendableAPIResource(object):
+ """
+ Fork of CreateableAPIResource class with different method names
+ """
+
+ @classmethod
+ def send(cls, attach_host_name=False, id=None, compress_payload=False, **body):
+ """
+ Create an API resource object
+
+ :param attach_host_name: link the new resource object to the host name
+ :type attach_host_name: bool
+
+ :param id: create a new resource object as a child of the given object
+ :type id: id
+
+ :param compress_payload: compress the payload using zlib
+ :type compress_payload: bool
+
+ :param body: new resource object attributes
+ :type body: dictionary
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ api_version = getattr(cls, "_api_version", None)
+
+ if id is None:
+ return APIClient.submit(
+ "POST",
+ cls._resource_name,
+ api_version,
+ body,
+ attach_host_name=attach_host_name,
+ compress_payload=compress_payload,
+ )
+
+ path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id)
+ return APIClient.submit(
+ "POST", path, api_version, body, attach_host_name=attach_host_name, compress_payload=compress_payload
+ )
+
+
+class UpdatableAPIResource(object):
+ """
+ Updatable API Resource
+ """
+
+ @classmethod
+ def update(cls, id, params=None, **body):
+ """
+ Update an API resource object
+
+ :param params: updated resource object source
+ :type params: dictionary
+
+ :param body: updated resource object attributes
+ :type body: dictionary
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ if params is None:
+ params = {}
+
+ path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id)
+ api_version = getattr(cls, "_api_version", None)
+
+ return APIClient.submit("PUT", path, api_version, body, **params)
+
+
+class CustomUpdatableAPIResource(object):
+ """
+ Updatable API Resource with custom HTTP Verb
+ """
+
+ @classmethod
+ def update(cls, method=None, id=None, params=None, **body):
+ """
+ Update an API resource object
+
+        :param method: HTTP method, defaults to PUT
+        :type method: string
+
+        :param id: updatable resource id
+        :type id: id
+
+ :param params: updated resource object source
+ :type params: dictionary
+
+ :param body: updated resource object attributes
+ :type body: dictionary
+
+ :returns: Dictionary representing the API's JSON response
+ """
+
+ if method is None:
+ method = "PUT"
+ if params is None:
+ params = {}
+
+ path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id)
+ api_version = getattr(cls, "_api_version", None)
+
+ return APIClient.submit(method, path, api_version, body, **params)
+
+
+class DeletableAPIResource(object):
+ """
+ Deletable API Resource
+ """
+
+ @classmethod
+ def delete(cls, id, **params):
+ """
+ Delete an API resource object
+
+ :param id: resource object to delete
+ :type id: id
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id)
+ api_version = getattr(cls, "_api_version", None)
+
+ return APIClient.submit("DELETE", path, api_version, **params)
+
+
+class GetableAPIResource(object):
+ """
+ Getable API Resource
+ """
+
+ @classmethod
+ def get(cls, id, **params):
+ """
+ Get information about an API resource object
+
+ :param id: resource object id to retrieve
+ :type id: id
+
+ :param params: parameters to filter API resource stream
+ :type params: dictionary
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ path = "{resource_name}/{resource_id}".format(resource_name=cls._resource_name, resource_id=id)
+ api_version = getattr(cls, "_api_version", None)
+
+ return APIClient.submit("GET", path, api_version, **params)
+
+
+class ListableAPIResource(object):
+ """
+ Listable API Resource
+ """
+
+ @classmethod
+ def get_all(cls, **params):
+ """
+ List API resource objects
+
+ :param params: parameters to filter API resource stream
+ :type params: dictionary
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ api_version = getattr(cls, "_api_version", None)
+
+ return APIClient.submit("GET", cls._resource_name, api_version, **params)
+
+
+class ListableAPISubResource(object):
+ """
+ Listable API Sub-Resource
+ """
+
+ @classmethod
+ def get_items(cls, id, **params):
+ """
+ List API sub-resource objects from a resource
+
+ :param id: resource id to retrieve sub-resource objects from
+ :type id: id
+
+ :param params: parameters to filter API sub-resource stream
+ :type params: dictionary
+
+ :returns: Dictionary representing the API's JSON response
+ """
+
+ path = "{resource_name}/{resource_id}/{sub_resource_name}".format(
+ resource_name=cls._resource_name, resource_id=id, sub_resource_name=cls._sub_resource_name
+ )
+ api_version = getattr(cls, "_api_version", None)
+
+ return APIClient.submit("GET", path, api_version, **params)
+
+
+class AddableAPISubResource(object):
+ """
+ Addable API Sub-Resource
+ """
+
+ @classmethod
+ def add_items(cls, id, params=None, **body):
+ """
+ Add new API sub-resource objects to a resource
+
+ :param id: resource id to add sub-resource objects to
+ :type id: id
+
+ :param params: request parameters
+ :type params: dictionary
+
+ :param body: new sub-resource objects attributes
+ :type body: dictionary
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ if params is None:
+ params = {}
+
+ path = "{resource_name}/{resource_id}/{sub_resource_name}".format(
+ resource_name=cls._resource_name, resource_id=id, sub_resource_name=cls._sub_resource_name
+ )
+ api_version = getattr(cls, "_api_version", None)
+
+ return APIClient.submit("POST", path, api_version, body, **params)
+
+
+class UpdatableAPISubResource(object):
+ """
+ Updatable API Sub-Resource
+ """
+
+ @classmethod
+ def update_items(cls, id, params=None, **body):
+ """
+ Update API sub-resource objects of a resource
+
+ :param id: resource id to update sub-resource objects from
+ :type id: id
+
+ :param params: request parameters
+ :type params: dictionary
+
+ :param body: updated sub-resource objects attributes
+ :type body: dictionary
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ if params is None:
+ params = {}
+
+ path = "{resource_name}/{resource_id}/{sub_resource_name}".format(
+ resource_name=cls._resource_name, resource_id=id, sub_resource_name=cls._sub_resource_name
+ )
+ api_version = getattr(cls, "_api_version", None)
+
+ return APIClient.submit("PUT", path, api_version, body, **params)
+
+
+class DeletableAPISubResource(object):
+ """
+ Deletable API Sub-Resource
+ """
+
+ @classmethod
+ def delete_items(cls, id, params=None, **body):
+ """
+ Delete API sub-resource objects from a resource
+
+ :param id: resource id to delete sub-resource objects from
+ :type id: id
+
+ :param params: request parameters
+ :type params: dictionary
+
+ :param body: deleted sub-resource objects attributes
+ :type body: dictionary
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ if params is None:
+ params = {}
+
+ path = "{resource_name}/{resource_id}/{sub_resource_name}".format(
+ resource_name=cls._resource_name, resource_id=id, sub_resource_name=cls._sub_resource_name
+ )
+ api_version = getattr(cls, "_api_version", None)
+
+ return APIClient.submit("DELETE", path, api_version, body, **params)
+
+
+class SearchableAPIResource(object):
+ """
+ Fork of ListableAPIResource class with different method names
+ """
+
+ @classmethod
+ def _search(cls, **params):
+ """
+ Query an API resource stream
+
+ :param params: parameters to filter API resource stream
+ :type params: dictionary
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ api_version = getattr(cls, "_api_version", None)
+
+ return APIClient.submit("GET", cls._resource_name, api_version, **params)
+
+
+class ActionAPIResource(object):
+ """
+ Actionable API Resource
+ """
+
+ @classmethod
+ def _trigger_class_action(cls, method, action_name, id=None, params=None, **body):
+ """
+ Trigger an action
+
+ :param method: HTTP method to use to contact API endpoint
+ :type method: HTTP method string
+
+ :param action_name: action name
+ :type action_name: string
+
+ :param id: trigger the action for the specified resource object
+ :type id: id
+
+ :param params: action parameters
+ :type params: dictionary
+
+ :param body: action body
+ :type body: dictionary
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ if params is None:
+ params = {}
+
+ api_version = getattr(cls, "_api_version", None)
+
+ if id is None:
+ path = "{resource_name}/{action_name}".format(resource_name=cls._resource_name, action_name=action_name)
+ else:
+ path = "{resource_name}/{resource_id}/{action_name}".format(
+ resource_name=cls._resource_name, resource_id=id, action_name=action_name
+ )
+ if method == "GET":
+ # Do not add body to GET requests, it causes 400 Bad request responses on EU site
+ body = None
+ return APIClient.submit(method, path, api_version, body, **params)
+
+ @classmethod
+ def _trigger_action(cls, method, name, id=None, **body):
+ """
+ Trigger an action
+
+ :param method: HTTP method to use to contact API endpoint
+ :type method: HTTP method string
+
+ :param name: action name
+ :type name: string
+
+ :param id: trigger the action for the specified resource object
+ :type id: id
+
+ :param body: action body
+ :type body: dictionary
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ api_version = getattr(cls, "_api_version", None)
+ if id is None:
+ return APIClient.submit(method, name, api_version, body)
+
+ path = "{action_name}/{resource_id}".format(action_name=name, resource_id=id)
+ if method == "GET":
+ # Do not add body to GET requests, it causes 400 Bad request responses on EU site
+ body = None
+ return APIClient.submit(method, path, api_version, body)
+
+
+class UpdatableAPISyntheticsSubResource(object):
+ """
+ Update Synthetics sub resource
+ """
+
+ @classmethod
+ def update_synthetics_items(cls, id, params=None, **body):
+ """
+ Update API sub-resource objects of a resource
+
+ :param id: resource id to update sub-resource objects from
+ :type id: id
+
+ :param params: request parameters
+ :type params: dictionary
+
+ :param body: updated sub-resource objects attributes
+ :type body: dictionary
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ if params is None:
+ params = {}
+
+ path = "{resource_name}/tests/{resource_id}/{sub_resource_name}".format(
+ resource_name=cls._resource_name, resource_id=id, sub_resource_name=cls._sub_resource_name
+ )
+ api_version = getattr(cls, "_api_version", None)
+
+ return APIClient.submit("PUT", path, api_version, body, **params)
+
+
+class UpdatableAPISyntheticsResource(object):
+ """
+ Update Synthetics resource
+ """
+
+ @classmethod
+ def update_synthetics(cls, id, params=None, **body):
+ """
+ Update an API resource object
+
+ :param params: updated resource object source
+ :type params: dictionary
+
+ :param body: updated resource object attributes
+ :type body: dictionary
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ if params is None:
+ params = {}
+
+ path = "{resource_name}/tests/{resource_id}".format(resource_name=cls._resource_name, resource_id=id)
+ api_version = getattr(cls, "_api_version", None)
+
+ return APIClient.submit("PUT", path, api_version, body, **params)
+
+
+class ActionAPISyntheticsResource(object):
+ """
+ Actionable Synthetics API Resource
+ """
+
+ @classmethod
+ def _trigger_synthetics_class_action(cls, method, name, id=None, params=None, **body):
+ """
+ Trigger an action
+
+ :param method: HTTP method to use to contact API endpoint
+ :type method: HTTP method string
+
+ :param name: action name
+ :type name: string
+
+ :param id: trigger the action for the specified resource object
+ :type id: id
+
+ :param params: action parameters
+ :type params: dictionary
+
+ :param body: action body
+ :type body: dictionary
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ if params is None:
+ params = {}
+
+ api_version = getattr(cls, "_api_version", None)
+
+ if id is None:
+ path = "{resource_name}/{action_name}".format(resource_name=cls._resource_name, action_name=name)
+ else:
+ path = "{resource_name}/{action_name}/{resource_id}".format(
+ resource_name=cls._resource_name, resource_id=id, action_name=name
+ )
+ if method == "GET":
+ # Do not add body to GET requests, it causes 400 Bad request responses on EU site
+ body = None
+ return APIClient.submit(method, path, api_version, body, **params)
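+
+# Composition sketch (illustrative only; `Widget` and its endpoint are hypothetical):
+# concrete API wrappers are built by mixing these classes in and setting
+# `_resource_name` (plus `_api_version` / `_sub_resource_name` where relevant).
+#
+#     class Widget(GetableAPIResource, CreateableAPIResource, DeletableAPIResource):
+#         _resource_name = "widget"   # hypothetical endpoint
+#
+#     Widget.create(name="w1")   # POST   widget
+#     Widget.get(123)            # GET    widget/123
+#     Widget.delete(123)         # DELETE widget/123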
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/roles.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/roles.py
new file mode 100644
index 0000000..2fce1dd
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/roles.py
@@ -0,0 +1,71 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import (
+ ActionAPIResource,
+ CreateableAPIResource,
+ CustomUpdatableAPIResource,
+ DeletableAPIResource,
+ GetableAPIResource,
+ ListableAPIResource,
+)
+
+from datadog.api.api_client import APIClient
+
+
+class Roles(
+ ActionAPIResource,
+ CreateableAPIResource,
+ CustomUpdatableAPIResource,
+ GetableAPIResource,
+ ListableAPIResource,
+ DeletableAPIResource,
+):
+ """
+    A wrapper around the Roles HTTP API.
+ """
+
+ _resource_name = "roles"
+ _api_version = "v2"
+
+ @classmethod
+ def update(cls, id, **body):
+ """
+ Update a role's attributes
+
+ :param id: uuid of the role
+ :param body: dict with type of the input, role `id`, and modified attributes
+ :returns: Dictionary representing the API's JSON response
+ """
+ params = {}
+ return super(Roles, cls).update("PATCH", id, params=params, **body)
+
+ @classmethod
+ def assign_permission(cls, id, **body):
+ """
+ Assign permission to a role
+
+ :param id: uuid of the role to assign permission to
+ :param body: dict with "type": "permissions" and uuid of permission to assign
+ :returns: Dictionary representing the API's JSON response
+ """
+ params = {}
+ path = "{resource_name}/{resource_id}/permissions".format(resource_name=cls._resource_name, resource_id=id)
+ api_version = getattr(cls, "_api_version", None)
+
+ return APIClient.submit("POST", path, api_version, body, **params)
+
+ @classmethod
+ def unassign_permission(cls, id, **body):
+ """
+ Unassign permission from a role
+
+ :param id: uuid of the role to unassign permission from
+ :param body: dict with "type": "permissions" and uuid of permission to unassign
+ :returns: Dictionary representing the API's JSON response
+ """
+ params = {}
+ path = "{resource_name}/{resource_id}/permissions".format(resource_name=cls._resource_name, resource_id=id)
+ api_version = getattr(cls, "_api_version", None)
+
+ return APIClient.submit("DELETE", path, api_version, body, **params)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/screenboards.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/screenboards.py
new file mode 100644
index 0000000..9367ab7
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/screenboards.py
@@ -0,0 +1,50 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import (
+ GetableAPIResource,
+ CreateableAPIResource,
+ UpdatableAPIResource,
+ DeletableAPIResource,
+ ActionAPIResource,
+ ListableAPIResource,
+)
+
+
+class Screenboard(
+ GetableAPIResource,
+ CreateableAPIResource,
+ UpdatableAPIResource,
+ DeletableAPIResource,
+ ActionAPIResource,
+ ListableAPIResource,
+):
+ """
+ A wrapper around Screenboard HTTP API.
+ """
+
+ _resource_name = "screen"
+
+ @classmethod
+ def share(cls, board_id):
+ """
+ Share the screenboard with given id
+
+ :param board_id: screenboard to share
+ :type board_id: id
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ return super(Screenboard, cls)._trigger_action("POST", "screen/share", board_id)
+
+ @classmethod
+ def revoke(cls, board_id):
+ """
+ Revoke a shared screenboard with given id
+
+ :param board_id: screenboard to revoke
+ :type board_id: id
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ return super(Screenboard, cls)._trigger_action("DELETE", "screen/share", board_id)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/service_checks.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/service_checks.py
new file mode 100644
index 0000000..72fcb9a
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/service_checks.py
@@ -0,0 +1,45 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.constants import CheckStatus
+from datadog.api.exceptions import ApiError
+from datadog.api.resources import ActionAPIResource
+
+
+class ServiceCheck(ActionAPIResource):
+ """
+ A wrapper around ServiceCheck HTTP API.
+ """
+
+ @classmethod
+ def check(cls, **body):
+ """
+ Post check statuses for use with monitors
+
+        :param check: name of the check
+ :type check: string
+
+ :param host_name: name of the host submitting the check
+ :type host_name: string
+
+ :param status: integer for the status of the check
+        :type status: int - options: 0: OK, 1: WARNING, 2: CRITICAL, 3: UNKNOWN
+
+ :param timestamp: timestamp of the event
+ :type timestamp: POSIX timestamp
+
+ :param message: description of why this status occurred
+ :type message: string
+
+ :param tags: list of tags for this check
+ :type tags: string list
+
+ :returns: Dictionary representing the API's JSON response
+ """
+
+ # Validate checks, include only non-null values
+ for param, value in body.items():
+ if param == "status" and body[param] not in CheckStatus.ALL:
+ raise ApiError("Invalid status, expected one of: %s" % ", ".join(str(v) for v in CheckStatus.ALL))
+
+ return super(ServiceCheck, cls)._trigger_action("POST", "check_run", **body)
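Note the guard above: an out-of-range `status` raises `ApiError` locally, before any HTTP request is made. A minimal sketch using the `CheckStatus` constants the module already imports (check name and host are placeholders):

```python
from datadog import initialize, api
from datadog.api.constants import CheckStatus

initialize(api_key="<DATADOG_API_KEY>", app_key="<DATADOG_APP_KEY>")

# POST check_run; status must be one of CheckStatus.ALL (0-3)
api.ServiceCheck.check(
    check="app.ok",           # check name (placeholder)
    host_name="app-host-01",
    status=CheckStatus.OK,    # 0
    message="all good",
    tags=["env:dev"],
)
```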
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/service_level_objectives.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/service_level_objectives.py
new file mode 100644
index 0000000..abb5a5d
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/service_level_objectives.py
@@ -0,0 +1,213 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.util.format import force_to_epoch_seconds
+from datadog.api.resources import (
+ GetableAPIResource,
+ CreateableAPIResource,
+ UpdatableAPIResource,
+ ListableAPIResource,
+ DeletableAPIResource,
+ ActionAPIResource,
+)
+
+
+class ServiceLevelObjective(
+ GetableAPIResource,
+ CreateableAPIResource,
+ UpdatableAPIResource,
+ ListableAPIResource,
+ DeletableAPIResource,
+ ActionAPIResource,
+):
+ """
+ A wrapper around Service Level Objective HTTP API.
+ """
+
+ _resource_name = "slo"
+
+ @classmethod
+ def create(cls, attach_host_name=False, method="POST", id=None, params=None, **body):
+ """
+        Create an SLO
+
+ :returns: created SLO details
+ """
+ return super(ServiceLevelObjective, cls).create(
+ attach_host_name=False, method="POST", id=None, params=params, **body
+ )
+
+ @classmethod
+ def get(cls, id, **params):
+ """
+        Get a specific SLO's details.
+
+ :param id: SLO id to get details for
+ :type id: str
+
+ :returns: SLO details
+ """
+ return super(ServiceLevelObjective, cls).get(id, **params)
+
+ @classmethod
+ def get_all(cls, query=None, tags_query=None, metrics_query=None, ids=None, offset=0, limit=100, **params):
+ """
+ Get all SLO details.
+
+ :param query: optional search query to filter results for SLO name
+ :type query: str
+
+ :param tags_query: optional search query to filter results for a single SLO tag
+        :type tags_query: str
+
+ :param metrics_query: optional search query to filter results based on SLO numerator and denominator
+        :type metrics_query: str
+
+ :param ids: optional list of SLO ids to get many specific SLOs at once.
+ :type ids: list(str)
+
+ :param offset: offset of results to use (default 0)
+ :type offset: int
+
+ :param limit: limit of results to return (default: 100)
+ :type limit: int
+
+ :returns: SLOs matching the query
+ """
+ search_terms = {}
+ if query:
+ search_terms["query"] = query
+ if ids:
+ search_terms["ids"] = ids
+ if tags_query:
+ search_terms["tags_query"] = tags_query
+ if metrics_query:
+ search_terms["metrics_query"] = metrics_query
+ search_terms["offset"] = offset
+ search_terms["limit"] = limit
+
+ return super(ServiceLevelObjective, cls).get_all(**search_terms)
+
+ @classmethod
+ def update(cls, id, params=None, **body):
+ """
+        Update a specific SLO's details.
+
+ :param id: SLO id to update details for
+ :type id: str
+
+ :returns: SLO details
+ """
+ return super(ServiceLevelObjective, cls).update(id, params, **body)
+
+ @classmethod
+ def delete(cls, id, **params):
+ """
+ Delete a specific SLO.
+
+ :param id: SLO id to delete
+ :type id: str
+
+ :returns: SLO ids removed
+ """
+ return super(ServiceLevelObjective, cls).delete(id, **params)
+
+ @classmethod
+ def bulk_delete(cls, ops, **params):
+ """
+ Bulk Delete Timeframes from multiple SLOs.
+
+ :param ops: a dictionary mapping of SLO ID to timeframes to remove.
+ :type ops: dict(str, list(str))
+
+ :returns: Dictionary representing the API's JSON response
+ `errors` - errors with operation
+ `data` - updates and deletions
+ """
+ return super(ServiceLevelObjective, cls)._trigger_class_action(
+ "POST",
+ "bulk_delete",
+ body=ops,
+ params=params,
+ suppress_response_errors_on_codes=[200],
+ )
+
+ @classmethod
+ def delete_many(cls, ids, **params):
+ """
+ Delete Multiple SLOs
+
+ :param ids: a list of SLO IDs to remove
+ :type ids: list(str)
+
+        :returns: Dictionary representing the API's JSON response; see `data` (list of SLO ids) and `errors`
+ """
+ return super(ServiceLevelObjective, cls)._trigger_class_action(
+ "DELETE",
+ "",
+ params=params,
+ body={"ids": ids},
+ suppress_response_errors_on_codes=[200],
+ )
+
+ @classmethod
+ def can_delete(cls, ids, **params):
+ """
+ Check if the following SLOs can be safely deleted.
+
+ This is used to check if SLO has any references to it.
+
+ :param ids: a list of SLO IDs to check
+ :type ids: list(str)
+
+ :returns: Dictionary representing the API's JSON response
+ "data.ok" represents a list of SLO ids that have no known references.
+ "errors" contains a dictionary of SLO ID to known reference(s).
+ """
+ params["ids"] = ids
+ return super(ServiceLevelObjective, cls)._trigger_class_action(
+ "GET",
+ "can_delete",
+ params=params,
+ body=None,
+ suppress_response_errors_on_codes=[200],
+ )
+
+ @classmethod
+ def history(cls, id, from_ts, to_ts, **params):
+ """
+ Get the SLO's history from the given time range.
+
+ :param id: SLO ID to query
+ :type id: str
+
+ :param from_ts: `from` timestamp in epoch seconds to query
+ :type from_ts: int|datetime.datetime
+
+ :param to_ts: `to` timestamp in epoch seconds to query, must be > `from_ts`
+ :type to_ts: int|datetime.datetime
+
+ :returns: Dictionary representing the API's JSON response
+ "data.ok" represents a list of SLO ids that have no known references.
+ "errors" contains a dictionary of SLO ID to known reference(s).
+ """
+ params["id"] = id
+ params["from_ts"] = force_to_epoch_seconds(from_ts)
+ params["to_ts"] = force_to_epoch_seconds(to_ts)
+ return super(ServiceLevelObjective, cls)._trigger_class_action(
+ "GET",
+ "history",
+ id=id,
+ params=params,
+ body=None,
+ suppress_response_errors_on_codes=[200],
+ )
+
+ @classmethod
+ def search(cls, **params):
+ """
+ Search SLOs.
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ return super(ServiceLevelObjective, cls)._trigger_class_action("GET", "search", params=params)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/synthetics.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/synthetics.py
new file mode 100644
index 0000000..88c0e3a
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/synthetics.py
@@ -0,0 +1,214 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.exceptions import ApiError
+from datadog.api.resources import (
+ CreateableAPIResource,
+ GetableAPIResource,
+ ActionAPIResource,
+ UpdatableAPISyntheticsResource,
+ UpdatableAPISyntheticsSubResource,
+ ActionAPISyntheticsResource,
+)
+
+
+class Synthetics(
+ ActionAPIResource,
+ ActionAPISyntheticsResource,
+ CreateableAPIResource,
+ GetableAPIResource,
+ UpdatableAPISyntheticsResource,
+ UpdatableAPISyntheticsSubResource,
+):
+ """
+    A wrapper around Synthetics HTTP API.
+ """
+
+ _resource_name = "synthetics"
+ _sub_resource_name = "status"
+
+ @classmethod
+ def get_test(cls, id, **params):
+ """
+ Get test's details.
+
+ :param id: public id of the test to retrieve
+ :type id: string
+
+ :returns: Dictionary representing the API's JSON response
+ """
+
+ # API path = "synthetics/tests/
+
+ name = "tests"
+
+ return super(Synthetics, cls)._trigger_synthetics_class_action("GET", id=id, name=name, params=params)
+
+ @classmethod
+ def get_all_tests(cls, **params):
+ """
+ Get all tests' details.
+
+ :returns: Dictionary representing the API's JSON response
+ """
+
+ for p in ["locations", "tags"]:
+ if p in params and isinstance(params[p], list):
+ params[p] = ",".join(params[p])
+
+ # API path = "synthetics/tests"
+
+ return super(Synthetics, cls).get(id="tests", params=params)
+
+ @classmethod
+ def get_devices(cls, **params):
+ """
+ Get a list of devices for browser checks
+
+ :returns: Dictionary representing the API's JSON response
+ """
+
+ # API path = "synthetics/browser/devices"
+
+ name = "browser/devices"
+
+ return super(Synthetics, cls)._trigger_synthetics_class_action("GET", name=name, params=params)
+
+ @classmethod
+ def get_locations(cls, **params):
+ """
+ Get a list of all available locations
+
+ :return: Dictionary representing the API's JSON response
+ """
+
+ name = "locations"
+
+ # API path = "synthetics/locations
+
+ return super(Synthetics, cls)._trigger_synthetics_class_action("GET", name=name, params=params)
+
+ @classmethod
+ def get_results(cls, id, **params):
+ """
+ Get the most recent results for a test
+
+ :param id: public id of the test to retrieve results for
+ :type id: id
+
+ :return: Dictionary representing the API's JSON response
+ """
+
+ # API path = "synthetics/tests//results
+
+ path = "tests/{}/results".format(id)
+
+ return super(Synthetics, cls)._trigger_synthetics_class_action("GET", path, params=params)
+
+ @classmethod
+ def get_result(cls, id, result_id, **params):
+ """
+ Get a specific result for a given test.
+
+ :param id: public ID of the test to retrieve the most recent result for
+ :type id: id
+
+ :param result_id: result ID of the test to retrieve the most recent result for
+ :type result_id: id
+
+ :returns: Dictionary representing the API's JSON response
+ """
+
+ # API path = "synthetics/tests/results/
+
+ path = "tests/{}/results/{}".format(id, result_id)
+
+ return super(Synthetics, cls)._trigger_synthetics_class_action("GET", path, params=params)
+
+ @classmethod
+ def create_test(cls, **params):
+ """
+ Create a test
+
+ :param name: A unique name for the test
+ :type name: string
+
+ :param type: The type of test. Valid values are api and browser
+ :type type: string
+
+        :param subtype: required for SSL tests - for an SSL API test, specify ssl as the value;
+            otherwise, omit this argument.
+ :type subtype: string
+
+ :param config: The test configuration, contains the request specification and the assertions.
+ :type config: dict
+
+ :param options: List of options to customize the test
+ :type options: dict
+
+ :param message: A description of the test
+ :type message: string
+
+ :param locations: A list of the locations to send the tests from
+ :type locations: list
+
+ :param tags: A list of tags used to filter the test
+ :type tags: list
+
+ :return: Dictionary representing the API's JSON response
+ """
+
+ # API path = "synthetics/tests"
+
+ return super(Synthetics, cls).create(id="tests", **params)
+
+ @classmethod
+ def edit_test(cls, id, **params):
+ """
+ Edit a test
+
+ :param id: Public id of the test to edit
+ :type id: string
+
+ :return: Dictionary representing the API's JSON response
+ """
+
+ # API path = "synthetics/tests/"
+
+ return super(Synthetics, cls).update_synthetics(id=id, **params)
+
+ @classmethod
+ def start_or_pause_test(cls, id, **body):
+ """
+        Start or pause a given test
+
+ :param id: public id of the test to pause
+ :type id: string
+
+        :param new_status: new status for the test
+        :type new_status: string
+
+ :returns: Dictionary representing the API's JSON response
+ """
+
+ # API path = "synthetics/tests//status"
+
+ return super(Synthetics, cls).update_synthetics_items(id=id, **body)
+
+ @classmethod
+ def delete_test(cls, **body):
+ """
+ Delete a test
+
+        :param public_ids: list of public IDs of the tests to delete
+ :type public_ids: list of strings
+
+ :return: Dictionary representing the API's JSON response
+ """
+
+ if not isinstance(body["public_ids"], list):
+ raise ApiError("Parameter 'public_ids' must be a list")
+
+ # API path = "synthetics/tests/delete
+
+ return super(Synthetics, cls)._trigger_action("POST", name="synthetics", id="tests/delete", **body)
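Two behaviours worth noting from the class above: `get_all_tests` joins list-valued `locations`/`tags` filters into comma-separated strings, and `delete_test` insists that `public_ids` is a list. A sketch with placeholder ids:

```python
from datadog import initialize, api

initialize(api_key="<DATADOG_API_KEY>", app_key="<DATADOG_APP_KEY>")

# list filters become "aws:eu-west-1" / "env:prod" query params
tests = api.Synthetics.get_all_tests(locations=["aws:eu-west-1"], tags=["env:prod"])

api.Synthetics.delete_test(public_ids=["abc-def-ghi"])  # non-list raises ApiError
```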
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/tags.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/tags.py
new file mode 100644
index 0000000..2226cdb
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/tags.py
@@ -0,0 +1,54 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import (
+ CreateableAPIResource,
+ UpdatableAPIResource,
+ DeletableAPIResource,
+ GetableAPIResource,
+ ListableAPIResource,
+)
+
+
+class Tag(CreateableAPIResource, UpdatableAPIResource, GetableAPIResource, ListableAPIResource, DeletableAPIResource):
+ """
+ A wrapper around Tag HTTP API.
+ """
+
+ _resource_name = "tags/hosts"
+
+ @classmethod
+ def create(cls, host, **body):
+ """
+ Add tags to a host
+
+ :param tags: list of tags to apply to the host
+ :type tags: string list
+
+ :param source: source of the tags
+ :type source: string
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ params = {}
+ if "source" in body:
+ params["source"] = body["source"]
+ return super(Tag, cls).create(id=host, params=params, **body)
+
+ @classmethod
+ def update(cls, host, **body):
+ """
+ Update all tags for a given host
+
+ :param tags: list of tags to apply to the host
+ :type tags: string list
+
+ :param source: source of the tags
+ :type source: string
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ params = {}
+ if "source" in body:
+ params["source"] = body["source"]
+ return super(Tag, cls).update(id=host, params=params, **body)
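In both verbs the optional `source` is copied into the query string while the tag list rides in the JSON body. A sketch with a placeholder host:

```python
from datadog import initialize, api

initialize(api_key="<DATADOG_API_KEY>", app_key="<DATADOG_APP_KEY>")

api.Tag.create("my-host", tags=["role:web", "env:dev"], source="chef")  # add tags
api.Tag.update("my-host", tags=["role:web"], source="chef")             # replace all tags
```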
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/timeboards.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/timeboards.py
new file mode 100644
index 0000000..42d34da
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/timeboards.py
@@ -0,0 +1,20 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import (
+ GetableAPIResource,
+ CreateableAPIResource,
+ UpdatableAPIResource,
+ ListableAPIResource,
+ DeletableAPIResource,
+)
+
+
+class Timeboard(
+ GetableAPIResource, CreateableAPIResource, UpdatableAPIResource, ListableAPIResource, DeletableAPIResource
+):
+ """
+ A wrapper around Timeboard HTTP API.
+ """
+
+ _resource_name = "dash"
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/api/users.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/users.py
new file mode 100644
index 0000000..ff0b2f2
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/api/users.py
@@ -0,0 +1,50 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.api.resources import (
+ ActionAPIResource,
+ GetableAPIResource,
+ CreateableAPIResource,
+ UpdatableAPIResource,
+ ListableAPIResource,
+ DeletableAPIResource,
+)
+
+
+class User(
+ ActionAPIResource,
+ GetableAPIResource,
+ CreateableAPIResource,
+ UpdatableAPIResource,
+ ListableAPIResource,
+ DeletableAPIResource,
+):
+
+    """
+    A wrapper around User HTTP API.
+    """
+
+    _resource_name = "user"
+
+ @classmethod
+ def invite(cls, emails):
+ """
+        Send an invite to join Datadog to each of the email addresses in the
+        *emails* list. If *emails* is a string, it is wrapped in a list and
+        sent. Returns a list of email addresses for which an email was sent.
+
+        :param emails: email addresses to invite to join Datadog
+ :type emails: string list
+
+ :returns: Dictionary representing the API's JSON response
+ """
+ print("[DEPRECATION] User.invite() is deprecated. Use `create` instead.")
+
+ if not isinstance(emails, list):
+ emails = [emails]
+
+ body = {
+ "emails": emails,
+ }
+
+ return super(User, cls)._trigger_action("POST", "/invite_users", **body)
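`invite` is kept only for backwards compatibility: it wraps a bare string in a list, prints a deprecation notice, and POSTs to `/invite_users`. A sketch of both paths; the `create` kwargs follow the public Datadog users API and are assumptions here, not something this file defines:

```python
from datadog import initialize, api

initialize(api_key="<DATADOG_API_KEY>", app_key="<DATADOG_APP_KEY>")

api.User.invite("someone@example.com")  # deprecated; string is wrapped in a list

# preferred path via the standard create verb (kwargs assumed per the public API)
api.User.create(handle="someone@example.com", name="Someone")
```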
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/__init__.py
new file mode 100644
index 0000000..cb4aab6
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/__init__.py
@@ -0,0 +1,113 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import os
+import warnings
+import sys
+
+# 3p
+import argparse
+
+# datadog
+from datadog import initialize, __version__
+from datadog.dogshell.comment import CommentClient
+from datadog.dogshell.common import DogshellConfig
+from datadog.dogshell.dashboard_list import DashboardListClient
+from datadog.dogshell.downtime import DowntimeClient
+from datadog.dogshell.event import EventClient
+from datadog.dogshell.host import HostClient
+from datadog.dogshell.metric import MetricClient
+from datadog.dogshell.monitor import MonitorClient
+from datadog.dogshell.screenboard import ScreenboardClient
+from datadog.dogshell.search import SearchClient
+from datadog.dogshell.service_check import ServiceCheckClient
+from datadog.dogshell.service_level_objective import ServiceLevelObjectiveClient
+from datadog.dogshell.tag import TagClient
+from datadog.dogshell.timeboard import TimeboardClient
+from datadog.dogshell.dashboard import DashboardClient
+
+
+def main():
+ if sys.argv[0].endswith("dog"):
+ warnings.warn("dog is pending deprecation. Please use dogshell instead.", PendingDeprecationWarning)
+
+ parser = argparse.ArgumentParser(
+ description="Interact with the Datadog API", formatter_class=argparse.ArgumentDefaultsHelpFormatter
+ )
+ parser.add_argument(
+ "--config", help="location of your dogrc file (default ~/.dogrc)", default=os.path.expanduser("~/.dogrc")
+ )
+ parser.add_argument(
+ "--api-key",
+ help="your API key, from "
+ "https://app.datadoghq.com/account/settings#api. "
+ "You can also set the environment variables DATADOG_API_KEY or DD_API_KEY",
+ dest="api_key",
+ default=os.environ.get("DATADOG_API_KEY", os.environ.get("DD_API_KEY")),
+ )
+ parser.add_argument(
+ "--application-key",
+ help="your Application key, from "
+ "https://app.datadoghq.com/account/settings#api. "
+ "You can also set the environment variables DATADOG_APP_KEY or DD_APP_KEY",
+ dest="app_key",
+ default=os.environ.get("DATADOG_APP_KEY", os.environ.get("DD_APP_KEY")),
+ )
+ parser.add_argument(
+ "--pretty",
+ help="pretty-print output (suitable for human consumption, " "less useful for scripting)",
+ dest="format",
+ action="store_const",
+ const="pretty",
+ )
+ parser.add_argument(
+ "--raw", help="raw JSON as returned by the HTTP service", dest="format", action="store_const", const="raw"
+ )
+ parser.add_argument(
+ "--timeout", help="time to wait in seconds before timing" " out an API call (default 10)", default=10, type=int
+ )
+ parser.add_argument(
+ "-v", "--version", help="Dog API version", action="version", version="%(prog)s {0}".format(__version__)
+ )
+
+ parser.add_argument(
+ "--api_host",
+ help="Datadog site to send data, us (datadoghq.com), eu (datadoghq.eu), us3 (us3.datadoghq.com), \
+ us5 (us5.datadoghq.com), ap1 (ap1.datadoghq.com), gov (ddog-gov.com), or custom url. default: us",
+ dest="api_host",
+ )
+
+ config = DogshellConfig()
+
+ # Set up subparsers for each service
+ subparsers = parser.add_subparsers(title="Modes", dest="mode")
+ subparsers.required = True
+
+ CommentClient.setup_parser(subparsers)
+ SearchClient.setup_parser(subparsers)
+ MetricClient.setup_parser(subparsers)
+ TagClient.setup_parser(subparsers)
+ EventClient.setup_parser(subparsers)
+ MonitorClient.setup_parser(subparsers)
+ TimeboardClient.setup_parser(subparsers)
+ DashboardClient.setup_parser(subparsers)
+ ScreenboardClient.setup_parser(subparsers)
+ DashboardListClient.setup_parser(subparsers)
+ HostClient.setup_parser(subparsers)
+ DowntimeClient.setup_parser(subparsers)
+ ServiceCheckClient.setup_parser(subparsers)
+ ServiceLevelObjectiveClient.setup_parser(subparsers)
+
+ args = parser.parse_args()
+
+ config.load(args.config, args.api_key, args.app_key, args.api_host)
+
+ # Initialize datadog.api package
+ initialize(**config)
+
+ args.func(args)
+
+
+if __name__ == "__main__":
+ main()
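Before dispatching to a subcommand handler, `main()` resolves credentials through `DogshellConfig.load` and then seeds `datadog.api` via `initialize`. A sketch of that resolution with placeholder keys; because both keys are supplied, `~/.dogrc` is never read, and `"eu"` expands to the EU endpoint:

```python
from datadog import initialize
from datadog.dogshell.common import DogshellConfig

config = DogshellConfig()
config.load("~/.dogrc", "<DATADOG_API_KEY>", "<DATADOG_APP_KEY>", "eu")
assert config["api_host"] == "https://api.datadoghq.eu"

initialize(**config)  # what main() does before calling args.func(args)
```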
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/comment.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/comment.py
new file mode 100644
index 0000000..208d009
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/comment.py
@@ -0,0 +1,152 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import json
+import sys
+
+# datadog
+from datadog import api
+from datadog.dogshell.common import report_errors, report_warnings
+
+
+class CommentClient(object):
+ @classmethod
+ def setup_parser(cls, subparsers):
+ parser = subparsers.add_parser("comment", help="Post, update, and delete comments.")
+
+ verb_parsers = parser.add_subparsers(title="Verbs", dest="verb")
+ verb_parsers.required = True
+
+ post_parser = verb_parsers.add_parser("post", help="Post comments.")
+ post_parser.add_argument("handle", help="handle to post as.")
+ post_parser.add_argument("comment", help="comment message to post. if unset," " reads from stdin.", nargs="?")
+ post_parser.set_defaults(func=cls._post)
+
+ update_parser = verb_parsers.add_parser("update", help="Update existing comments.")
+ update_parser.add_argument("comment_id", help="comment to update (by id)")
+ update_parser.add_argument("handle", help="handle to post as.")
+ update_parser.add_argument("comment", help="comment message to post." " if unset, reads from stdin.", nargs="?")
+ update_parser.set_defaults(func=cls._update)
+
+ reply_parser = verb_parsers.add_parser("reply", help="Reply to existing comments.")
+ reply_parser.add_argument("comment_id", help="comment to reply to (by id)")
+ reply_parser.add_argument("handle", help="handle to post as.")
+ reply_parser.add_argument("comment", help="comment message to post." " if unset, reads from stdin.", nargs="?")
+ reply_parser.set_defaults(func=cls._reply)
+
+ show_parser = verb_parsers.add_parser("show", help="Show comment details.")
+ show_parser.add_argument("comment_id", help="comment to show")
+ show_parser.set_defaults(func=cls._show)
+
+ @classmethod
+ def _post(cls, args):
+ api._timeout = args.timeout
+ handle = args.handle
+ comment = args.comment
+ format = args.format
+ if comment is None:
+ comment = sys.stdin.read()
+ res = api.Comment.create(handle=handle, message=comment)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ message = res["comment"]["message"]
+ lines = message.split("\n")
+ message = "\n".join([" " + line for line in lines])
+ print("id\t\t" + str(res["comment"]["id"]))
+ print("url\t\t" + res["comment"]["url"])
+ print("resource\t" + res["comment"]["resource"])
+ print("handle\t\t" + res["comment"]["handle"])
+ print("message\n" + message)
+ elif format == "raw":
+ print(json.dumps(res))
+ else:
+ print("id\t\t" + str(res["comment"]["id"]))
+ print("url\t\t" + res["comment"]["url"])
+ print("resource\t" + res["comment"]["resource"])
+ print("handle\t\t" + res["comment"]["handle"])
+ print("message\t\t" + res["comment"]["message"].__repr__())
+
+ @classmethod
+ def _update(cls, args):
+ handle = args.handle
+ comment = args.comment
+ id = args.comment_id
+ format = args.format
+ if comment is None:
+ comment = sys.stdin.read()
+ res = api.Comment.update(id, handle=handle, message=comment)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ message = res["comment"]["message"]
+ lines = message.split("\n")
+ message = "\n".join([" " + line for line in lines])
+ print("id\t\t" + str(res["comment"]["id"]))
+ print("url\t\t" + res["comment"]["url"])
+ print("resource\t" + res["comment"]["resource"])
+ print("handle\t\t" + res["comment"]["handle"])
+ print("message\n" + message)
+ elif format == "raw":
+ print(json.dumps(res))
+ else:
+ print("id\t\t" + str(res["comment"]["id"]))
+ print("url\t\t" + res["comment"]["url"])
+ print("resource\t" + res["comment"]["resource"])
+ print("handle\t\t" + res["comment"]["handle"])
+ print("message\t\t" + res["comment"]["message"].__repr__())
+
+ @classmethod
+ def _reply(cls, args):
+ api._timeout = args.timeout
+ handle = args.handle
+ comment = args.comment
+ id = args.comment_id
+ format = args.format
+ if comment is None:
+ comment = sys.stdin.read()
+ res = api.Comment.create(handle=handle, message=comment, related_event_id=id)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ message = res["comment"]["message"]
+ lines = message.split("\n")
+ message = "\n".join([" " + line for line in lines])
+ print("id\t\t" + str(res["comment"]["id"]))
+ print("url\t\t" + res["comment"]["url"])
+ print("resource\t" + res["comment"]["resource"])
+ print("handle\t\t" + res["comment"]["handle"])
+ print("message\n" + message)
+ elif format == "raw":
+ print(json.dumps(res))
+ else:
+ print("id\t\t" + str(res["comment"]["id"]))
+ print("url\t\t" + res["comment"]["url"])
+ print("resource\t" + res["comment"]["resource"])
+ print("handle\t\t" + res["comment"]["handle"])
+ print("message\t\t" + res["comment"]["message"].__repr__())
+
+ @classmethod
+ def _show(cls, args):
+ api._timeout = args.timeout
+ id = args.comment_id
+ format = args.format
+ res = api.Event.get(id)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ message = res["event"]["text"]
+ lines = message.split("\n")
+ message = "\n".join([" " + line for line in lines])
+ print("id\t\t" + str(res["event"]["id"]))
+ print("url\t\t" + res["event"]["url"])
+ print("resource\t" + res["event"]["resource"])
+ print("message\n" + message)
+ elif format == "raw":
+ print(json.dumps(res))
+ else:
+ print("id\t\t" + str(res["event"]["id"]))
+ print("url\t\t" + res["event"]["url"])
+ print("resource\t" + res["event"]["resource"])
+ print("message\t\t" + res["event"]["text"].__repr__())
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/common.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/common.py
new file mode 100644
index 0000000..251e658
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/common.py
@@ -0,0 +1,122 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+from __future__ import print_function
+import os
+import sys
+
+# datadog
+from datadog.util.compat import is_p3k, configparser, IterableUserDict, get_input
+
+
+def print_err(msg):
+ if is_p3k():
+ print(msg + "\n", file=sys.stderr)
+ else:
+ sys.stderr.write(msg + "\n")
+ sys.stderr.flush()
+
+
+def report_errors(res):
+ if "errors" in res:
+ errors = res["errors"]
+ if isinstance(errors, list):
+ for error in errors:
+ print_err("ERROR: {}".format(error))
+ else:
+ print_err("ERROR: {}".format(errors))
+ sys.exit(1)
+ return False
+
+
+def report_warnings(res):
+ if "warnings" in res:
+ warnings = res["warnings"]
+ if isinstance(warnings, list):
+ for warning in warnings:
+ print_err("WARNING: {}".format(warning))
+ else:
+ print_err("WARNING: {}".format(warnings))
+ return True
+ return False
+
+
+class DogshellConfig(IterableUserDict):
+ def load(self, config_file, api_key, app_key, api_host):
+ config = configparser.ConfigParser()
+
+ if api_host is not None:
+ if api_host in ("datadoghq.com", "us"):
+ self["api_host"] = "https://api.datadoghq.com"
+ elif api_host in ("datadoghq.eu", "eu"):
+ self["api_host"] = "https://api.datadoghq.eu"
+ elif api_host in ("us3.datadoghq.com", "us3"):
+ self["api_host"] = "https://api.us3.datadoghq.com"
+ elif api_host in ("us5.datadoghq.com", "us5"):
+ self["api_host"] = "https://api.us5.datadoghq.com"
+ elif api_host in ("ap1.datadoghq.com", "ap1"):
+ self["api_host"] = "https://api.ap1.datadoghq.com"
+ elif api_host in ("ddog-gov.com", "gov"):
+ self["api_host"] = "https://api.ddog-gov.com"
+ else:
+ self["api_host"] = api_host
+ if api_key is not None and app_key is not None:
+ self["api_key"] = api_key
+ self["app_key"] = app_key
+ else:
+ if os.access(config_file, os.F_OK):
+ config.read(config_file)
+ if not config.has_section("Connection"):
+ report_errors({"errors": ["%s has no [Connection] section" % config_file]})
+ else:
+ try:
+ response = None
+ while response is None or response.strip().lower() not in ["", "y", "n"]:
+ response = get_input("%s does not exist. Would you like to" " create it? [Y/n] " % config_file)
+ if response.strip().lower() in ["", "y"]:
+ # Read the api and app keys from stdin
+ while True:
+ api_key = get_input(
+ "What is your api key? (Get it here: "
+ "https://app.datadoghq.com/account/settings#api) "
+ )
+ if api_key.isalnum():
+ break
+ print("Datadog api keys can only contain alphanumeric characters.")
+ while True:
+ app_key = get_input(
+ "What is your app key? (Get it here: "
+ "https://app.datadoghq.com/account/settings#api) "
+ )
+ if app_key.isalnum():
+ break
+ print("Datadog app keys can only contain alphanumeric characters.")
+
+ # Write the config file
+ config.add_section("Connection")
+ config.set("Connection", "apikey", api_key)
+ config.set("Connection", "appkey", app_key)
+
+ f = open(config_file, "w")
+ config.write(f)
+ f.close()
+ print("Wrote %s" % config_file)
+ elif response.strip().lower() == "n":
+ # Abort
+ print_err("Exiting\n")
+ sys.exit(1)
+ except (KeyboardInterrupt, EOFError):
+ # Abort
+ print_err("\nExiting")
+ sys.exit(1)
+
+ self["api_key"] = config.get("Connection", "apikey")
+ self["app_key"] = config.get("Connection", "appkey")
+ if config.has_section("Proxy"):
+ self["proxies"] = dict(config.items("Proxy"))
+ if config.has_option("Connection", "host_name"):
+ self["host_name"] = config.get("Connection", "host_name")
+ if config.has_option("Connection", "api_host"):
+ self["api_host"] = config.get("Connection", "api_host")
+ assert self["api_key"] is not None and self["app_key"] is not None
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/dashboard.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/dashboard.py
new file mode 100644
index 0000000..bc37bd6
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/dashboard.py
@@ -0,0 +1,174 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import json
+import sys
+
+# 3p
+import argparse
+
+# datadog
+from datadog import api
+from datadog.dogshell.common import report_errors, report_warnings
+from datadog.util.format import pretty_json
+
+
+class DashboardClient(object):
+ @classmethod
+ def setup_parser(cls, subparsers):
+ parser = subparsers.add_parser("dashboard", help="Create, edit, and delete dashboards")
+
+ verb_parsers = parser.add_subparsers(title="Verbs", dest="verb")
+ verb_parsers.required = True
+
+ post_parser = verb_parsers.add_parser("post", help="Create dashboards")
+ # Required arguments:
+ post_parser.add_argument("title", help="title for the new dashboard")
+ post_parser.add_argument(
+ "widgets", help="widget definitions as a JSON string. If unset," " reads from stdin.", nargs="?"
+ )
+ post_parser.add_argument("layout_type", choices=["ordered", "free"], help="Layout type of the dashboard.")
+ # Optional arguments:
+ post_parser.add_argument("--description", help="Short description of the dashboard")
+ post_parser.add_argument(
+ "--read_only",
+ help="Whether this dashboard is read-only. " "If True, only the author and admins can make changes to it.",
+ action="store_true",
+ )
+ post_parser.add_argument(
+ "--notify_list",
+ type=_json_string,
+ help="A json list of user handles, e.g. " '\'["user1@domain.com", "user2@domain.com"]\'',
+ )
+ post_parser.add_argument(
+ "--template_variables",
+ type=_json_string,
+ help="A json list of template variable dicts, e.g. "
+ '\'[{"name": "host", "prefix": "host", '
+ '"default": "my-host"}]\'',
+ )
+ post_parser.set_defaults(func=cls._post)
+
+ update_parser = verb_parsers.add_parser("update", help="Update existing dashboards")
+ # Required arguments:
+ update_parser.add_argument("dashboard_id", help="Dashboard to replace" " with the new definition")
+ update_parser.add_argument("title", help="New title for the dashboard")
+ update_parser.add_argument(
+ "widgets", help="Widget definitions as a JSON string." " If unset, reads from stdin", nargs="?"
+ )
+ update_parser.add_argument("layout_type", choices=["ordered", "free"], help="Layout type of the dashboard.")
+ # Optional arguments:
+ update_parser.add_argument("--description", help="Short description of the dashboard")
+ update_parser.add_argument(
+ "--read_only",
+ help="Whether this dashboard is read-only. " "If True, only the author and admins can make changes to it.",
+ action="store_true",
+ )
+ update_parser.add_argument(
+ "--notify_list",
+ type=_json_string,
+ help="A json list of user handles, e.g. " '\'["user1@domain.com", "user2@domain.com"]\'',
+ )
+ update_parser.add_argument(
+ "--template_variables",
+ type=_json_string,
+ help="A json list of template variable dicts, e.g. "
+ '\'[{"name": "host", "prefix": "host", '
+ '"default": "my-host"}]\'',
+ )
+ update_parser.set_defaults(func=cls._update)
+
+ show_parser = verb_parsers.add_parser("show", help="Show a dashboard definition")
+ show_parser.add_argument("dashboard_id", help="Dashboard to show")
+ show_parser.set_defaults(func=cls._show)
+
+ delete_parser = verb_parsers.add_parser("delete", help="Delete dashboards")
+ delete_parser.add_argument("dashboard_id", help="Dashboard to delete")
+ delete_parser.set_defaults(func=cls._delete)
+
+ @classmethod
+ def _post(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ widgets = args.widgets
+ if args.widgets is None:
+ widgets = sys.stdin.read()
+ widgets = json.loads(widgets)
+
+ # Required arguments
+ payload = {"title": args.title, "widgets": widgets, "layout_type": args.layout_type}
+ # Optional arguments
+ if args.description:
+ payload["description"] = args.description
+ if args.read_only:
+ payload["is_read_only"] = args.read_only
+ if args.notify_list:
+ payload["notify_list"] = args.notify_list
+ if args.template_variables:
+ payload["template_variables"] = args.template_variables
+
+ res = api.Dashboard.create(**payload)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _update(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ widgets = args.widgets
+ if args.widgets is None:
+ widgets = sys.stdin.read()
+ widgets = json.loads(widgets)
+
+ # Required arguments
+ payload = {"title": args.title, "widgets": widgets, "layout_type": args.layout_type}
+ # Optional arguments
+ if args.description:
+ payload["description"] = args.description
+ if args.read_only:
+ payload["is_read_only"] = args.read_only
+ if args.notify_list:
+ payload["notify_list"] = args.notify_list
+ if args.template_variables:
+ payload["template_variables"] = args.template_variables
+
+ res = api.Dashboard.update(args.dashboard_id, **payload)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _show(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Dashboard.get(args.dashboard_id)
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _delete(cls, args):
+ api._timeout = args.timeout
+ res = api.Dashboard.delete(args.dashboard_id)
+ if res is not None:
+ report_warnings(res)
+ report_errors(res)
+
+
+def _json_string(str):
+ try:
+ return json.loads(str)
+ except Exception:
+ raise argparse.ArgumentTypeError("bad json parameter")
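`_post` ultimately assembles a keyword payload for `api.Dashboard.create`, with `_json_string` serving as the argparse type that turns the `--notify_list`/`--template_variables` flags into parsed JSON. A sketch of the equivalent direct call; the widget definition is a placeholder:

```python
import json

from datadog import initialize, api

initialize(api_key="<DATADOG_API_KEY>", app_key="<DATADOG_APP_KEY>")

widgets = json.loads('[{"definition": {"type": "note", "content": "hello"}}]')
res = api.Dashboard.create(
    title="Example board",
    widgets=widgets,
    layout_type="ordered",
    description="created with a dogshell-style payload",
)
```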
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/dashboard_list.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/dashboard_list.py
new file mode 100644
index 0000000..9164ba7
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/dashboard_list.py
@@ -0,0 +1,339 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import json
+
+# 3p
+from datadog.util.format import pretty_json
+
+# datadog
+from datadog import api
+from datadog.dogshell.common import report_errors, report_warnings
+
+
+class DashboardListClient(object):
+ @classmethod
+ def setup_parser(cls, subparsers):
+ parser = subparsers.add_parser("dashboard_list", help="Create, edit, and delete dashboard lists")
+ verb_parsers = parser.add_subparsers(title="Verbs", dest="verb")
+ verb_parsers.required = True
+
+ # Create Dashboard List parser
+ post_parser = verb_parsers.add_parser("post", help="Create a dashboard list")
+ post_parser.add_argument("name", help="Name for the dashboard list")
+ post_parser.set_defaults(func=cls._post)
+
+ # Update Dashboard List parser
+ update_parser = verb_parsers.add_parser("update", help="Update existing dashboard list")
+ update_parser.add_argument("dashboard_list_id", help="Dashboard list to replace with the new definition")
+ update_parser.add_argument("name", help="Name for the dashboard list")
+ update_parser.set_defaults(func=cls._update)
+
+ # Show Dashboard List parser
+ show_parser = verb_parsers.add_parser("show", help="Show a dashboard list definition")
+ show_parser.add_argument("dashboard_list_id", help="Dashboard list to show")
+ show_parser.set_defaults(func=cls._show)
+
+ # Show All Dashboard Lists parser
+ show_all_parser = verb_parsers.add_parser("show_all", help="Show a list of all dashboard lists")
+ show_all_parser.set_defaults(func=cls._show_all)
+
+ # Delete Dashboard List parser
+ delete_parser = verb_parsers.add_parser("delete", help="Delete existing dashboard list")
+ delete_parser.add_argument("dashboard_list_id", help="Dashboard list to delete")
+ delete_parser.set_defaults(func=cls._delete)
+
+ # Get Dashboards for Dashboard List parser
+ get_dashboards_parser = verb_parsers.add_parser(
+ "show_dashboards", help="Show a list of all dashboards for an existing dashboard list"
+ )
+ get_dashboards_parser.add_argument("dashboard_list_id", help="Dashboard list to show dashboards from")
+ get_dashboards_parser.set_defaults(func=cls._show_dashboards)
+
+ # Get Dashboards for Dashboard List parser (v2)
+ get_dashboards_v2_parser = verb_parsers.add_parser(
+ "show_dashboards_v2", help="Show a list of all dashboards for an existing dashboard list"
+ )
+ get_dashboards_v2_parser.add_argument("dashboard_list_id", help="Dashboard list to show dashboards from")
+ get_dashboards_v2_parser.set_defaults(func=cls._show_dashboards_v2)
+
+ # Add Dashboards to Dashboard List parser
+ add_dashboards_parser = verb_parsers.add_parser(
+ "add_dashboards", help="Add dashboards to an existing dashboard list"
+ )
+ add_dashboards_parser.add_argument("dashboard_list_id", help="Dashboard list to add dashboards to")
+
+ add_dashboards_parser.add_argument(
+ "dashboards",
+ help="A JSON list of dashboard dicts, e.g. "
+ + '[{"type": "custom_timeboard", "id": 1234}, '
+ + '{"type": "custom_screenboard", "id": 123}]',
+ )
+ add_dashboards_parser.set_defaults(func=cls._add_dashboards)
+
+ # Add Dashboards to Dashboard List parser (v2)
+ add_dashboards_v2_parser = verb_parsers.add_parser(
+ "add_dashboards_v2", help="Add dashboards to an existing dashboard list"
+ )
+ add_dashboards_v2_parser.add_argument("dashboard_list_id", help="Dashboard list to add dashboards to")
+ add_dashboards_v2_parser.add_argument(
+ "dashboards",
+ help="A JSON list of dashboard dicts, e.g. "
+ + '[{"type": "custom_timeboard", "id": "ewc-a4f-8ps"}, '
+ + '{"type": "custom_screenboard", "id": "kwj-3t3-d3m"}]',
+ )
+ add_dashboards_v2_parser.set_defaults(func=cls._add_dashboards_v2)
+
+ # Update Dashboards of Dashboard List parser
+ update_dashboards_parser = verb_parsers.add_parser(
+ "update_dashboards", help="Update dashboards of an existing dashboard list"
+ )
+ update_dashboards_parser.add_argument("dashboard_list_id", help="Dashboard list to update with dashboards")
+ update_dashboards_parser.add_argument(
+ "dashboards",
+ help="A JSON list of dashboard dicts, e.g. "
+ + '[{"type": "custom_timeboard", "id": 1234}, '
+ + '{"type": "custom_screenboard", "id": 123}]',
+ )
+ update_dashboards_parser.set_defaults(func=cls._update_dashboards)
+
+ # Update Dashboards of Dashboard List parser (v2)
+ update_dashboards_v2_parser = verb_parsers.add_parser(
+ "update_dashboards_v2", help="Update dashboards of an existing dashboard list"
+ )
+ update_dashboards_v2_parser.add_argument("dashboard_list_id", help="Dashboard list to update with dashboards")
+ update_dashboards_v2_parser.add_argument(
+ "dashboards",
+ help="A JSON list of dashboard dicts, e.g. "
+ + '[{"type": "custom_timeboard", "id": "ewc-a4f-8ps"}, '
+ + '{"type": "custom_screenboard", "id": "kwj-3t3-d3m"}]',
+ )
+ update_dashboards_v2_parser.set_defaults(func=cls._update_dashboards_v2)
+
+ # Delete Dashboards from Dashboard List parser
+ delete_dashboards_parser = verb_parsers.add_parser(
+ "delete_dashboards", help="Delete dashboards from an existing dashboard list"
+ )
+ delete_dashboards_parser.add_argument("dashboard_list_id", help="Dashboard list to delete dashboards from")
+ delete_dashboards_parser.add_argument(
+ "dashboards",
+ help="A JSON list of dashboard dicts, e.g. "
+ + '[{"type": "custom_timeboard", "id": 1234}, '
+ + '{"type": "custom_screenboard", "id": 123}]',
+ )
+ delete_dashboards_parser.set_defaults(func=cls._delete_dashboards)
+
+ # Delete Dashboards from Dashboard List parser
+ delete_dashboards_v2_parser = verb_parsers.add_parser(
+ "delete_dashboards_v2", help="Delete dashboards from an existing dashboard list"
+ )
+ delete_dashboards_v2_parser.add_argument("dashboard_list_id", help="Dashboard list to delete dashboards from")
+ delete_dashboards_v2_parser.add_argument(
+ "dashboards",
+ help="A JSON list of dashboard dicts, e.g. "
+ + '[{"type": "custom_timeboard", "id": "ewc-a4f-8ps"}, '
+ + '{"type": "custom_screenboard", "id": "kwj-3t3-d3m"}]',
+ )
+ delete_dashboards_v2_parser.set_defaults(func=cls._delete_dashboards_v2)
+
+ @classmethod
+ def _post(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ name = args.name
+
+ res = api.DashboardList.create(name=name)
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _update(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ dashboard_list_id = args.dashboard_list_id
+ name = args.name
+
+ res = api.DashboardList.update(dashboard_list_id, name=name)
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _show(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ dashboard_list_id = args.dashboard_list_id
+
+ res = api.DashboardList.get(dashboard_list_id)
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _show_all(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+
+ res = api.DashboardList.get_all()
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _delete(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ dashboard_list_id = args.dashboard_list_id
+
+ res = api.DashboardList.delete(dashboard_list_id)
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _show_dashboards(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ dashboard_list_id = args.dashboard_list_id
+
+ res = api.DashboardList.get_items(dashboard_list_id)
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _show_dashboards_v2(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ dashboard_list_id = args.dashboard_list_id
+
+ res = api.DashboardList.v2.get_items(dashboard_list_id)
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _add_dashboards(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ dashboard_list_id = args.dashboard_list_id
+ dashboards = json.loads(args.dashboards)
+
+ res = api.DashboardList.add_items(dashboard_list_id, dashboards=dashboards)
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _add_dashboards_v2(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ dashboard_list_id = args.dashboard_list_id
+ dashboards = json.loads(args.dashboards)
+
+ res = api.DashboardList.v2.add_items(dashboard_list_id, dashboards=dashboards)
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _update_dashboards(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ dashboard_list_id = args.dashboard_list_id
+ dashboards = json.loads(args.dashboards)
+
+ res = api.DashboardList.update_items(dashboard_list_id, dashboards=dashboards)
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _update_dashboards_v2(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ dashboard_list_id = args.dashboard_list_id
+ dashboards = json.loads(args.dashboards)
+
+ res = api.DashboardList.v2.update_items(dashboard_list_id, dashboards=dashboards)
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _delete_dashboards(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ dashboard_list_id = args.dashboard_list_id
+ dashboards = json.loads(args.dashboards)
+
+ res = api.DashboardList.delete_items(dashboard_list_id, dashboards=dashboards)
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _delete_dashboards_v2(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ dashboard_list_id = args.dashboard_list_id
+ dashboards = json.loads(args.dashboards)
+
+ res = api.DashboardList.v2.delete_items(dashboard_list_id, dashboards=dashboards)
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
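The v1 and v2 verbs differ mainly in the id format of the dashboards they accept, as the parser help strings above show. A sketch of both, assuming the create response exposes the new list's id as `id`:

```python
from datadog import initialize, api

initialize(api_key="<DATADOG_API_KEY>", app_key="<DATADOG_APP_KEY>")

list_id = api.DashboardList.create(name="team dashboards")["id"]  # assumed response shape

# v1 item endpoints take integer dashboard ids...
api.DashboardList.add_items(list_id, dashboards=[{"type": "custom_timeboard", "id": 1234}])
# ...while the v2 endpoints take the newer string ids
api.DashboardList.v2.add_items(
    list_id, dashboards=[{"type": "custom_timeboard", "id": "ewc-a4f-8ps"}]
)
```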
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/downtime.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/downtime.py
new file mode 100644
index 0000000..1c53b46
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/downtime.py
@@ -0,0 +1,132 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import json
+
+# 3p
+from datadog.util.format import pretty_json
+
+# datadog
+from datadog import api
+from datadog.dogshell.common import report_errors, report_warnings
+
+
+class DowntimeClient(object):
+ @classmethod
+ def setup_parser(cls, subparsers):
+ parser = subparsers.add_parser("downtime", help="Create, edit, and delete downtimes")
+ parser.add_argument(
+ "--string_ids",
+ action="store_true",
+ dest="string_ids",
+ help="Represent downtime IDs as strings instead of ints in JSON",
+ )
+
+ verb_parsers = parser.add_subparsers(title="Verbs", dest="verb")
+ verb_parsers.required = True
+
+ post_parser = verb_parsers.add_parser("post", help="Create a downtime")
+ post_parser.add_argument("scope", help="scope to apply downtime to")
+ post_parser.add_argument("start", help="POSIX timestamp to start the downtime", default=None)
+ post_parser.add_argument("--end", help="POSIX timestamp to end the downtime", default=None)
+ post_parser.add_argument(
+ "--message", help="message to include with notifications" " for this downtime", default=None
+ )
+ post_parser.set_defaults(func=cls._schedule_downtime)
+
+ update_parser = verb_parsers.add_parser("update", help="Update existing downtime")
+ update_parser.add_argument("downtime_id", help="downtime to replace" " with the new definition")
+ update_parser.add_argument("--scope", help="scope to apply downtime to")
+ update_parser.add_argument("--start", help="POSIX timestamp to start" " the downtime", default=None)
+ update_parser.add_argument("--end", help="POSIX timestamp to" " end the downtime", default=None)
+ update_parser.add_argument(
+ "--message", help="message to include with notifications" " for this downtime", default=None
+ )
+ update_parser.set_defaults(func=cls._update_downtime)
+
+ show_parser = verb_parsers.add_parser("show", help="Show a downtime definition")
+ show_parser.add_argument("downtime_id", help="downtime to show")
+ show_parser.set_defaults(func=cls._show_downtime)
+
+ show_all_parser = verb_parsers.add_parser("show_all", help="Show a list of all downtimes")
+ show_all_parser.add_argument(
+ "--current_only", help="only return downtimes that" " are active when the request is made", default=None
+ )
+ show_all_parser.set_defaults(func=cls._show_all_downtime)
+
+ delete_parser = verb_parsers.add_parser("delete", help="Delete a downtime")
+ delete_parser.add_argument("downtime_id", help="downtime to delete")
+ delete_parser.set_defaults(func=cls._cancel_downtime)
+
+ cancel_parser = verb_parsers.add_parser("cancel_by_scope", help="Cancel all downtimes with a given scope")
+ cancel_parser.add_argument("scope", help="The scope of the downtimes to cancel")
+ cancel_parser.set_defaults(func=cls._cancel_downtime_by_scope)
+
+ @classmethod
+ def _schedule_downtime(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Downtime.create(scope=args.scope, start=args.start, end=args.end, message=args.message)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _update_downtime(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Downtime.update(
+ args.downtime_id, scope=args.scope, start=args.start, end=args.end, message=args.message
+ )
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _cancel_downtime(cls, args):
+ api._timeout = args.timeout
+ res = api.Downtime.delete(args.downtime_id)
+ if res is not None:
+ report_warnings(res)
+ report_errors(res)
+
+ @classmethod
+ def _show_downtime(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Downtime.get(args.downtime_id)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _show_all_downtime(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Downtime.get_all(current_only=args.current_only)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _cancel_downtime_by_scope(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Downtime.cancel_downtime_by_scope(scope=args.scope)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
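The downtime verbs map one-to-one onto the `Downtime` resource; only `cancel_by_scope` goes through a dedicated action. A sketch with a placeholder scope and times:

```python
import time

from datadog import initialize, api

initialize(api_key="<DATADOG_API_KEY>", app_key="<DATADOG_APP_KEY>")

now = int(time.time())
api.Downtime.create(scope="env:dev", start=now, end=now + 3600, message="maintenance")

api.Downtime.cancel_downtime_by_scope(scope="env:dev")  # cancels every downtime on the scope
```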
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/event.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/event.py
new file mode 100644
index 0000000..89d68a6
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/event.py
@@ -0,0 +1,201 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import datetime
+import time
+import re
+import sys
+import json
+
+# datadog
+from datadog import api
+from datadog.dogshell.common import report_errors, report_warnings
+
+
+time_pat = re.compile(r"(?P[0-9]*\.?[0-9]+)(?P[mhd])")
+
+
+def prettyprint_event(event):
+ title = event["title"] or ""
+ text = event.get("text", "") or ""
+ handle = event.get("handle", "") or ""
+ date = event["date_happened"]
+ dt = datetime.datetime.fromtimestamp(date)
+ link = event["url"]
+
+ # Print
+ print((title + " " + text + " " + " (" + handle + ")").strip())
+ print(dt.isoformat(" ") + " | " + link)
+
+
+def print_event(event):
+ prettyprint_event(event)
+
+
+def prettyprint_event_details(event):
+ prettyprint_event(event)
+
+
+def print_event_details(event):
+ prettyprint_event(event)
+
+
+def parse_time(timestring):
+ now = time.mktime(datetime.datetime.now().timetuple())
+ if timestring is None:
+ t = now
+ else:
+ try:
+ t = int(timestring)
+ except Exception:
+ match = time_pat.match(timestring)
+ if match is None:
+                raise Exception("could not parse time: %s" % timestring)
+ delta = float(match.group("delta"))
+ unit = match.group("unit")
+ if unit == "m":
+ delta = delta * 60
+ if unit == "h":
+ delta = delta * 60 * 60
+ if unit == "d":
+ delta = delta * 60 * 60 * 24
+ t = now - int(delta)
+ return int(t)
+
+
+class EventClient(object):
+ @classmethod
+ def setup_parser(cls, subparsers):
+ parser = subparsers.add_parser("event", help="Post events, get event details," " and view the event stream.")
+ verb_parsers = parser.add_subparsers(title="Verbs", dest="verb")
+ verb_parsers.required = True
+
+ post_parser = verb_parsers.add_parser("post", help="Post events.")
+ post_parser.add_argument("title", help="event title")
+ post_parser.add_argument(
+ "--date_happened",
+ type=int,
+ help="POSIX timestamp" " when the event occurred. if unset defaults to the current time.",
+ )
+ post_parser.add_argument("--handle", help="user to post as. if unset, submits " "as the generic API user.")
+ post_parser.add_argument("--priority", help='"normal" or "low". defaults to "normal"', default="normal")
+ post_parser.add_argument(
+ "--related_event_id", help="event to post as a child of." " if unset, posts a top-level event"
+ )
+ post_parser.add_argument("--tags", help="comma separated list of tags")
+ post_parser.add_argument("--host", help="related host (default to the local host name)", default="")
+ post_parser.add_argument(
+ "--no_host", help="no host is associated with the event" " (overrides --host))", action="store_true"
+ )
+ post_parser.add_argument("--device", help="related device (e.g. eth0, /dev/sda1)")
+ post_parser.add_argument("--aggregation_key", help="key to aggregate the event with")
+ post_parser.add_argument("--type", help="type of event, e.g. nagios, jenkins, etc.")
+ post_parser.add_argument("--alert_type", help='"error", "warning", "info" or "success". defaults to "info"')
+ post_parser.add_argument("message", help="event message body. " "if unset, reads from stdin.", nargs="?")
+ post_parser.set_defaults(func=cls._post)
+
+ show_parser = verb_parsers.add_parser("show", help="Show event details.")
+ show_parser.add_argument("event_id", help="event to show")
+ show_parser.set_defaults(func=cls._show)
+
+ stream_parser = verb_parsers.add_parser(
+ "stream",
+ help="Retrieve events from the Event Stream",
+ description="Stream start and end times can be specified as either a POSIX"
+ " timestamp (e.g. the output of `date +%s`) or as a period of"
+ " time in the past (e.g. '5m', '6h', '3d').",
+ )
+ stream_parser.add_argument("start", help="start date for the stream request")
+ stream_parser.add_argument("end", help="end date for the stream request " "(defaults to 'now')", nargs="?")
+ stream_parser.add_argument("--priority", help="filter by priority." " 'normal' or 'low'. defaults to 'normal'")
+ stream_parser.add_argument("--sources", help="comma separated list of sources to filter by")
+ stream_parser.add_argument("--tags", help="comma separated list of tags to filter by")
+ stream_parser.set_defaults(func=cls._stream)
+
+ @classmethod
+ def _post(cls, args):
+ """
+ Post an event.
+ """
+ api._timeout = args.timeout
+ format = args.format
+ message = args.message
+ if message is None:
+ message = sys.stdin.read()
+ if args.tags is not None:
+ tags = [t.strip() for t in args.tags.split(",")]
+ else:
+ tags = None
+
+ host = None if args.no_host else args.host
+
+ # Submit event
+ res = api.Event.create(
+ title=args.title,
+ text=message,
+ date_happened=args.date_happened,
+ handle=args.handle,
+ priority=args.priority,
+ related_event_id=args.related_event_id,
+ tags=tags,
+ host=host,
+ device=args.device,
+ aggregation_key=args.aggregation_key,
+ source_type_name=args.type,
+ alert_type=args.alert_type,
+ )
+
+ # Report
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ prettyprint_event(res["event"])
+ elif format == "raw":
+ print(json.dumps(res))
+ else:
+ print_event(res["event"])
+
+ @classmethod
+ def _show(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Event.get(args.event_id)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ prettyprint_event_details(res["event"])
+ elif format == "raw":
+ print(json.dumps(res))
+ else:
+ print_event_details(res["event"])
+
+ @classmethod
+ def _stream(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ if args.sources is not None:
+ sources = [s.strip() for s in args.sources.split(",")]
+ else:
+ sources = None
+ if args.tags is not None:
+ tags = [t.strip() for t in args.tags.split(",")]
+ else:
+ tags = None
+ start = parse_time(args.start)
+ end = parse_time(args.end)
+ # res = api.Event.query(start=start, end=end)
+ # TODO FIXME
+ res = api.Event.query(start=start, end=end, priority=args.priority, sources=sources, tags=tags)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ for event in res["events"]:
+ prettyprint_event(event)
+ print()
+ elif format == "raw":
+ print(json.dumps(res))
+ else:
+ for event in res["events"]:
+ print_event(event)
+ print()
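
All of the dogshell clients in this vendored tree follow the same wiring: `setup_parser` registers a subcommand, `set_defaults(func=...)` binds each verb to a classmethod, and every handler reads the shared `timeout` and `format` attributes off the parsed namespace. A minimal driver showing that flow for `EventClient` (a sketch only; the real `dog` console script additionally handles config files and credentials):

    import argparse

    from datadog import initialize
    from datadog.dogshell.event import EventClient

    # Global flags that every verb handler expects on the namespace.
    parser = argparse.ArgumentParser(prog="dog")
    parser.add_argument("--timeout", type=int, default=10)
    parser.add_argument("--format", choices=["pretty", "raw"], default="pretty")

    subparsers = parser.add_subparsers(title="Modes", dest="mode")
    subparsers.required = True
    EventClient.setup_parser(subparsers)

    # Placeholder credentials; real keys come from your own configuration.
    initialize(api_key="<API_KEY>", app_key="<APP_KEY>")

    # Equivalent to: dog event stream 1h
    args = parser.parse_args(["event", "stream", "1h"])
    args.func(args)
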
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/host.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/host.py
new file mode 100644
index 0000000..1f93a78
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/host.py
@@ -0,0 +1,61 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import json
+
+# 3p
+from datadog.util.format import pretty_json
+
+# datadog
+from datadog import api
+from datadog.dogshell.common import report_errors, report_warnings
+
+
+class HostClient(object):
+ @classmethod
+ def setup_parser(cls, subparsers):
+ parser = subparsers.add_parser("host", help="Mute, unmute hosts")
+ verb_parsers = parser.add_subparsers(title="Verbs", dest="verb")
+ verb_parsers.required = True
+
+ mute_parser = verb_parsers.add_parser("mute", help="Mute a host")
+ mute_parser.add_argument("host_name", help="host to mute")
+ mute_parser.add_argument(
+ "--end", help="POSIX timestamp, if omitted," " host will be muted until explicitly unmuted", default=None
+ )
+ mute_parser.add_argument("--message", help="string to associate with the" " muting of this host", default=None)
+ mute_parser.add_argument(
+ "--override",
+ help="true/false, if true and the host is already" " muted, will overwrite existing end on the host",
+ action="store_true",
+ )
+ mute_parser.set_defaults(func=cls._mute)
+
+ unmute_parser = verb_parsers.add_parser("unmute", help="Unmute a host")
+ unmute_parser.add_argument("host_name", help="host to mute")
+ unmute_parser.set_defaults(func=cls._unmute)
+
+ @classmethod
+ def _mute(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Host.mute(args.host_name, end=args.end, message=args.message, override=args.override)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _unmute(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Host.unmute(args.host_name)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
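
The `--end` flag above is a raw POSIX timestamp, so callers compute the expiry themselves. A sketch of muting a host for one hour through the same `api.Host.mute` call that `_mute` wraps (host name and message are illustrative):

    import time

    from datadog import api, initialize

    initialize(api_key="<API_KEY>", app_key="<APP_KEY>")

    # Mute web01 until one hour from now.
    res = api.Host.mute("web01", end=int(time.time()) + 3600, message="deploy window")
    print(res)
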
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/metric.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/metric.py
new file mode 100644
index 0000000..135e4b9
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/metric.py
@@ -0,0 +1,72 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+from collections import defaultdict
+
+# datadog
+from datadog import api
+from datadog.dogshell.common import report_errors, report_warnings
+
+
+class MetricClient(object):
+ @classmethod
+ def setup_parser(cls, subparsers):
+ parser = subparsers.add_parser("metric", help="Post metrics.")
+ verb_parsers = parser.add_subparsers(title="Verbs", dest="verb")
+ verb_parsers.required = True
+
+ post_parser = verb_parsers.add_parser("post", help="Post metrics")
+ post_parser.add_argument("name", help="metric name")
+ post_parser.add_argument("value", help="metric value (integer or decimal value)", type=float)
+ post_parser.add_argument(
+ "--host", help="scopes your metric to a specific host " "(default to the local host name)", default=""
+ )
+ post_parser.add_argument(
+ "--no_host", help="no host is associated with the metric" " (overrides --host))", action="store_true"
+ )
+ post_parser.add_argument("--device", help="scopes your metric to a specific device", default=None)
+ post_parser.add_argument("--tags", help="comma-separated list of tags", default=None)
+ post_parser.add_argument(
+ "--localhostname",
+ help="deprecated, used to force `--host`"
+ " to the local hostname "
+ "(now default when no `--host` is specified)",
+ action="store_true",
+ )
+ post_parser.add_argument(
+ "--type", help="type of the metric - gauge(32bit float)" " or counter(64bit integer)", default=None
+ )
+        post_parser.set_defaults(func=cls._post)
+
+ @classmethod
+ def _post(cls, args):
+ """
+ Post a metric.
+ """
+ # Format parameters
+ api._timeout = args.timeout
+
+ host = None if args.no_host else args.host
+
+ if args.tags:
+ tags = sorted(set([t.strip() for t in args.tags.split(",") if t]))
+ else:
+ tags = None
+
+ # Submit metric
+ res = api.Metric.send(
+ metric=args.name, points=args.value, host=host, device=args.device, tags=tags, metric_type=args.type
+ )
+
+ # Report
+ res = defaultdict(list, res)
+
+ if args.localhostname:
+            # Warn about the `--localhostname` command line flag deprecation
+ res["warnings"].append(
+ u"`--localhostname` command line flag is deprecated, made default when no `--host` "
+ u"is specified. See the `--host` option for more information."
+ )
+ report_warnings(res)
+ report_errors(res)
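
A detail worth noting in `_post` above: the response is rewrapped as `defaultdict(list, res)` so the deprecation warning can be appended whether or not the API response already carries a `warnings` key. The same pattern in isolation:

    from collections import defaultdict

    res = {"status": "ok"}        # typical response without a "warnings" key
    res = defaultdict(list, res)  # absent keys now materialize as empty lists
    res["warnings"].append("`--localhostname` is deprecated")
    assert res["warnings"] == ["`--localhostname` is deprecated"]
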
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/monitor.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/monitor.py
new file mode 100644
index 0000000..ddc207e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/monitor.py
@@ -0,0 +1,431 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import argparse
+import json
+
+# 3p
+from datadog.util.format import pretty_json
+
+# datadog
+from datadog import api
+from datadog.dogshell.common import report_errors, report_warnings, print_err
+
+
+class MonitorClient(object):
+ @classmethod
+ def setup_parser(cls, subparsers):
+ parser = subparsers.add_parser("monitor", help="Create, edit, and delete monitors")
+ parser.add_argument(
+ "--string_ids",
+ action="store_true",
+ dest="string_ids",
+ help="Represent monitor IDs as strings instead of ints in JSON",
+ )
+
+ verb_parsers = parser.add_subparsers(title="Verbs", dest="verb")
+ verb_parsers.required = True
+
+ post_parser = verb_parsers.add_parser("post", help="Create a monitor")
+ post_parser.add_argument("type", help="type of the monitor, e.g." "'metric alert' 'service check'")
+ post_parser.add_argument(
+ "query", help="query to notify on with syntax varying " "depending on what type of monitor you are creating"
+ )
+ post_parser.add_argument("--name", help="name of the alert", default=None)
+ post_parser.add_argument(
+ "--message", help="message to include with notifications" " for this monitor", default=None
+ )
+ post_parser.add_argument("--tags", help="comma-separated list of tags", default=None)
+ post_parser.add_argument(
+ "--priority",
+ help="Integer from 1 (high) to 5 (low) indicating alert severity.",
+ default=None
+ )
+ post_parser.add_argument("--options", help="json options for the monitor", default=None)
+ post_parser.set_defaults(func=cls._post)
+
+ file_post_parser = verb_parsers.add_parser("fpost", help="Create a monitor from file")
+ file_post_parser.add_argument("file", help="json file holding all details", type=argparse.FileType("r"))
+ file_post_parser.set_defaults(func=cls._file_post)
+
+ update_parser = verb_parsers.add_parser("update", help="Update existing monitor")
+ update_parser.add_argument("monitor_id", help="monitor to replace with the new definition")
+ update_parser.add_argument(
+ "type",
+ nargs="?",
+ help="[Deprecated] optional argument preferred" "type of the monitor, e.g. 'metric alert' 'service check'",
+ default=None,
+ )
+ update_parser.add_argument(
+ "query",
+ nargs="?",
+ help="[Deprecated] optional argument preferred"
+ "query to notify on with syntax varying depending on monitor type",
+ default=None,
+ )
+ update_parser.add_argument(
+ "--type", help="type of the monitor, e.g. " "'metric alert' 'service check'", default=None, dest="type_opt"
+ )
+ update_parser.add_argument(
+ "--query",
+ help="query to notify on with syntax varying" " depending on monitor type",
+ default=None,
+ dest="query_opt",
+ )
+ update_parser.add_argument("--name", help="name of the alert", default=None)
+ update_parser.add_argument("--tags", help="comma-separated list of tags", default=None)
+ update_parser.add_argument(
+ "--message", help="message to include with " "notifications for this monitor", default=None
+ )
+ update_parser.add_argument(
+ "--priority",
+ help="Integer from 1 (high) to 5 (low) indicating alert severity.",
+ default=None
+ )
+ update_parser.add_argument("--options", help="json options for the monitor", default=None)
+ update_parser.set_defaults(func=cls._update)
+
+ file_update_parser = verb_parsers.add_parser("fupdate", help="Update existing" " monitor from file")
+ file_update_parser.add_argument("file", help="json file holding all details", type=argparse.FileType("r"))
+ file_update_parser.set_defaults(func=cls._file_update)
+
+ show_parser = verb_parsers.add_parser("show", help="Show a monitor definition")
+ show_parser.add_argument("monitor_id", help="monitor to show")
+ show_parser.set_defaults(func=cls._show)
+
+ show_all_parser = verb_parsers.add_parser("show_all", help="Show a list of all monitors")
+ show_all_parser.add_argument(
+ "--group_states",
+ help="comma separated list of group states to filter by"
+ "(choose one or more from 'all', 'alert', 'warn', or 'no data')",
+ )
+ show_all_parser.add_argument("--name", help="string to filter monitors by name")
+ show_all_parser.add_argument(
+ "--tags",
+ help="comma separated list indicating what tags, if any, "
+ "should be used to filter the list of monitors by scope (e.g. 'host:host0')",
+ )
+ show_all_parser.add_argument(
+ "--monitor_tags",
+ help="comma separated list indicating what service "
+ "and/or custom tags, if any, should be used to filter the list of monitors",
+ )
+
+ show_all_parser.set_defaults(func=cls._show_all)
+
+ delete_parser = verb_parsers.add_parser("delete", help="Delete a monitor")
+ delete_parser.add_argument("monitor_id", help="monitor to delete")
+ delete_parser.set_defaults(func=cls._delete)
+
+ mute_all_parser = verb_parsers.add_parser("mute_all", help="Globally mute " "monitors (downtime over *)")
+ mute_all_parser.set_defaults(func=cls._mute_all)
+
+ unmute_all_parser = verb_parsers.add_parser(
+ "unmute_all", help="Globally unmute " "monitors (cancel downtime over *)"
+ )
+ unmute_all_parser.set_defaults(func=cls._unmute_all)
+
+ mute_parser = verb_parsers.add_parser("mute", help="Mute a monitor")
+ mute_parser.add_argument("monitor_id", help="monitor to mute")
+ mute_parser.add_argument("--scope", help="scope to apply the mute to," " e.g. role:db (optional)", default=[])
+ mute_parser.add_argument(
+ "--end", help="POSIX timestamp for when" " the mute should end (optional)", default=None
+ )
+ mute_parser.set_defaults(func=cls._mute)
+
+ unmute_parser = verb_parsers.add_parser("unmute", help="Unmute a monitor")
+ unmute_parser.add_argument("monitor_id", help="monitor to unmute")
+ unmute_parser.add_argument("--scope", help="scope to unmute (must be muted), " "e.g. role:db", default=[])
+ unmute_parser.add_argument("--all_scopes", help="clear muting across all scopes", action="store_true")
+ unmute_parser.set_defaults(func=cls._unmute)
+
+ can_delete_parser = verb_parsers.add_parser("can_delete", help="Check if you can delete some monitors")
+ can_delete_parser.add_argument("monitor_ids", help="monitors to check if they can be deleted")
+ can_delete_parser.set_defaults(func=cls._can_delete)
+
+ validate_parser = verb_parsers.add_parser("validate", help="Validates if a monitor definition is correct")
+ validate_parser.add_argument("type", help="type of the monitor, e.g." "'metric alert' 'service check'")
+ validate_parser.add_argument("query", help="the monitor query")
+ validate_parser.add_argument("--name", help="name of the alert", default=None)
+ validate_parser.add_argument(
+ "--message", help="message to include with notifications" " for this monitor", default=None
+ )
+ validate_parser.add_argument("--tags", help="comma-separated list of tags", default=None)
+ validate_parser.add_argument("--options", help="json options for the monitor", default=None)
+ validate_parser.set_defaults(func=cls._validate)
+
+ @classmethod
+ def _post(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ options = None
+ if args.options is not None:
+ options = json.loads(args.options)
+
+ if args.tags:
+ tags = sorted(set([t.strip() for t in args.tags.split(",") if t.strip()]))
+ else:
+ tags = None
+
+ body = {
+ "type": args.type,
+ "query": args.query,
+ "name": args.name,
+ "message": args.message,
+ "options": options
+ }
+ if tags:
+ body["tags"] = tags
+ if args.priority:
+ body["priority"] = args.priority
+
+ res = api.Monitor.create(**body)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _file_post(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ monitor = json.load(args.file)
+ body = {
+ "type": monitor["type"],
+ "query": monitor["query"],
+ "name": monitor["name"],
+ "message": monitor["message"],
+ "options": monitor["options"]
+ }
+ tags = monitor.get("tags", None)
+ if tags:
+ body["tags"] = tags
+ priority = monitor.get("priority", None)
+ if priority:
+ body["priority"] = priority
+
+ res = api.Monitor.create(**body)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _update(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+
+ to_update = {}
+ if args.type:
+ if args.type_opt:
+ msg = "Duplicate arguments for `type`. Using optional value --type"
+ print_err("WARNING: {}".format(msg))
+ else:
+ to_update["type"] = args.type
+ msg = "[DEPRECATION] `type` is no longer required to `update` and may be omitted"
+ print_err("WARNING: {}".format(msg))
+ if args.query:
+ if args.query_opt:
+ msg = "Duplicate arguments for `query`. Using optional value --query"
+ print_err("WARNING: {}".format(msg))
+ else:
+ to_update["query"] = args.query
+ msg = "[DEPRECATION] `query` is no longer required to `update` and may be omitted"
+ print_err("WARNING: {}".format(msg))
+ if args.name:
+ to_update["name"] = args.name
+ if args.message:
+ to_update["message"] = args.message
+ if args.type_opt:
+ to_update["type"] = args.type_opt
+ if args.query_opt:
+ to_update["query"] = args.query_opt
+ if args.tags:
+ to_update["tags"] = sorted(set([t.strip() for t in args.tags.split(",") if t.strip()]))
+ if args.priority:
+ to_update["priority"] = args.priority
+
+ if args.options is not None:
+ to_update["options"] = json.loads(args.options)
+
+ res = api.Monitor.update(args.monitor_id, **to_update)
+
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _file_update(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ monitor = json.load(args.file)
+ body = {
+ "type": monitor["type"],
+ "query": monitor["query"],
+ "name": monitor["name"],
+ "message": monitor["message"],
+ "options": monitor["options"]
+ }
+ tags = monitor.get("tags", None)
+ if tags:
+ body["tags"] = tags
+ priority = monitor.get("priority", None)
+ if priority:
+ body["priority"] = priority
+
+ res = api.Monitor.update(monitor["id"], **body)
+
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _show(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Monitor.get(args.monitor_id)
+ report_warnings(res)
+ report_errors(res)
+
+ if args.string_ids:
+ res["id"] = str(res["id"])
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _show_all(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+
+ res = api.Monitor.get_all(
+ group_states=args.group_states, name=args.name, tags=args.tags, monitor_tags=args.monitor_tags
+ )
+ report_warnings(res)
+ report_errors(res)
+
+ if args.string_ids:
+ for d in res:
+ d["id"] = str(d["id"])
+
+ if format == "pretty":
+ print(pretty_json(res))
+ elif format == "raw":
+ print(json.dumps(res))
+ else:
+ for d in res:
+ print(
+ "\t".join(
+ [
+ (str(d["id"])),
+ (cls._escape(d["message"])),
+ (cls._escape(d["name"])),
+ (str(d["options"])),
+ (str(d["org_id"])),
+ (d["query"]),
+ (d["type"]),
+ ]
+ )
+ )
+
+ @classmethod
+ def _delete(cls, args):
+ api._timeout = args.timeout
+ # TODO CHECK
+ res = api.Monitor.delete(args.monitor_id)
+ if res is not None:
+ report_warnings(res)
+ report_errors(res)
+
+ @classmethod
+ def _escape(cls, s):
+ return s.replace("\r", "\\r").replace("\n", "\\n").replace("\t", "\\t")
+
+ @classmethod
+ def _mute_all(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Monitor.mute_all()
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _unmute_all(cls, args):
+ api._timeout = args.timeout
+ res = api.Monitor.unmute_all()
+ if res is not None:
+ report_warnings(res)
+ report_errors(res)
+
+ @classmethod
+ def _mute(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Monitor.mute(args.monitor_id, scope=args.scope, end=args.end)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _unmute(cls, args):
+        api._timeout = args.timeout
+        format = args.format
+ res = api.Monitor.unmute(args.monitor_id, scope=args.scope, all_scopes=args.all_scopes)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _can_delete(cls, args):
+        api._timeout = args.timeout
+        format = args.format
+ monitor_ids = [i.strip() for i in args.monitor_ids.split(",") if i.strip()]
+ res = api.Monitor.can_delete(monitor_ids=monitor_ids)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _validate(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ options = None
+ if args.options is not None:
+ options = json.loads(args.options)
+
+ if args.tags:
+ tags = sorted(set([t.strip() for t in args.tags.split(",") if t.strip()]))
+ else:
+ tags = None
+
+ res = api.Monitor.validate(
+ type=args.type, query=args.query, name=args.name, message=args.message, tags=tags, options=options
+ )
+ # report_warnings(res)
+ # report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
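
`fpost` and `fupdate` above load a JSON file that must carry the `type`, `query`, `name`, `message`, and `options` keys, plus optional `tags` and `priority` (and `id` for `fupdate`). A sketch of producing such a file; the query and option values are illustrative, not prescriptive:

    import json

    monitor = {
        "type": "metric alert",
        "query": "avg(last_5m):avg:system.cpu.user{*} > 90",  # example query
        "name": "High CPU on any host",
        "message": "CPU above 90% for 5 minutes. @ops-team",
        "options": {"notify_no_data": False},  # one of many monitor options
        "tags": ["team:ops"],
        "priority": 3,
    }

    with open("high_cpu.json", "w") as f:
        json.dump(monitor, f, indent=2)

    # Then: dog monitor fpost high_cpu.json
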
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/screenboard.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/screenboard.py
new file mode 100644
index 0000000..093a3e1
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/screenboard.py
@@ -0,0 +1,308 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import argparse
+import json
+import platform
+import sys
+import webbrowser
+
+# 3p
+from datadog.util.format import pretty_json
+
+# datadog
+from datadog import api
+from datadog.dogshell.common import report_errors, report_warnings, print_err
+from datetime import datetime
+
+
+class ScreenboardClient(object):
+ @classmethod
+ def setup_parser(cls, subparsers):
+ parser = subparsers.add_parser("screenboard", help="Create, edit, and delete screenboards.")
+ parser.add_argument(
+ "--string_ids",
+ action="store_true",
+ dest="string_ids",
+ help="Represent screenboard IDs as strings instead of ints in JSON",
+ )
+
+ verb_parsers = parser.add_subparsers(title="Verbs", dest="verb")
+ verb_parsers.required = True
+
+ post_parser = verb_parsers.add_parser("post", help="Create screenboards.")
+ post_parser.add_argument("title", help="title for the new screenboard")
+ post_parser.add_argument("description", help="short description of the screenboard")
+ post_parser.add_argument(
+ "graphs", help="graph definitions as a JSON string." " if unset, reads from stdin.", nargs="?"
+ )
+ post_parser.add_argument(
+ "--template_variables",
+ type=_template_variables,
+ default=[],
+ help="a json list of template variable dicts, e.g. "
+ "[{'name': 'host', 'prefix': 'host', 'default': 'host:my-host'}]",
+ )
+ post_parser.add_argument("--width", type=int, default=None, help="screenboard width in pixels")
+ post_parser.add_argument("--height", type=int, default=None, help="screenboard height in pixels")
+ post_parser.set_defaults(func=cls._post)
+
+ update_parser = verb_parsers.add_parser("update", help="Update existing screenboards.")
+ update_parser.add_argument("screenboard_id", help="screenboard to replace " " with the new definition")
+ update_parser.add_argument("title", help="title for the new screenboard")
+ update_parser.add_argument("description", help="short description of the screenboard")
+ update_parser.add_argument(
+ "graphs", help="graph definitions as a JSON string." " if unset, reads from stdin.", nargs="?"
+ )
+ update_parser.add_argument(
+ "--template_variables",
+ type=_template_variables,
+ default=[],
+ help="a json list of template variable dicts, e.g. "
+ "[{'name': 'host', 'prefix': 'host', 'default': "
+ "'host:my-host'}]",
+ )
+ update_parser.add_argument("--width", type=int, default=None, help="screenboard width in pixels")
+ update_parser.add_argument("--height", type=int, default=None, help="screenboard height in pixels")
+ update_parser.set_defaults(func=cls._update)
+
+ show_parser = verb_parsers.add_parser("show", help="Show a screenboard definition.")
+ show_parser.add_argument("screenboard_id", help="screenboard to show")
+ show_parser.set_defaults(func=cls._show)
+
+ delete_parser = verb_parsers.add_parser("delete", help="Delete a screenboard.")
+ delete_parser.add_argument("screenboard_id", help="screenboard to delete")
+ delete_parser.set_defaults(func=cls._delete)
+
+ share_parser = verb_parsers.add_parser("share", help="Share an existing screenboard's" " with a public URL.")
+ share_parser.add_argument("screenboard_id", help="screenboard to share")
+ share_parser.set_defaults(func=cls._share)
+
+ revoke_parser = verb_parsers.add_parser("revoke", help="Revoke an existing screenboard's" " with a public URL.")
+ revoke_parser.add_argument("screenboard_id", help="screenboard to revoke")
+ revoke_parser.set_defaults(func=cls._revoke)
+
+ pull_parser = verb_parsers.add_parser("pull", help="Pull a screenboard on the server" " into a local file")
+ pull_parser.add_argument("screenboard_id", help="ID of screenboard to pull")
+ pull_parser.add_argument("filename", help="file to pull screenboard into")
+ pull_parser.set_defaults(func=cls._pull)
+
+ push_parser = verb_parsers.add_parser(
+ "push", help="Push updates to screenboards" " from local files to the server"
+ )
+ push_parser.add_argument(
+ "--append_auto_text",
+ action="store_true",
+ dest="append_auto_text",
+ help="When pushing to the server, appends filename and"
+ " timestamp to the end of the screenboard description",
+ )
+ push_parser.add_argument(
+ "file", help="screenboard files to push to the server", nargs="+", type=argparse.FileType("r")
+ )
+ push_parser.set_defaults(func=cls._push)
+
+ new_file_parser = verb_parsers.add_parser(
+ "new_file", help="Create a new screenboard" " and put its contents in a file"
+ )
+ new_file_parser.add_argument("filename", help="name of file to create with" " empty screenboard")
+ new_file_parser.add_argument(
+ "graphs", help="graph definitions as a JSON string." " if unset, reads from stdin.", nargs="?"
+ )
+ new_file_parser.set_defaults(func=cls._new_file)
+
+ @classmethod
+ def _pull(cls, args):
+ cls._write_screen_to_file(args.screenboard_id, args.filename, args.timeout, args.format, args.string_ids)
+
+ # TODO Is there a test for this one ?
+ @classmethod
+ def _push(cls, args):
+        api._timeout = args.timeout
+        format = args.format
+ for f in args.file:
+ screen_obj = json.load(f)
+
+ if args.append_auto_text:
+ datetime_str = datetime.now().strftime("%x %X")
+ auto_text = "
\nUpdated at {0} from {1} ({2}) on {3}".format(
+ datetime_str, f.name, screen_obj["id"], platform.node()
+ )
+ screen_obj["description"] += auto_text
+
+ if "id" in screen_obj:
+ # Always convert to int, in case it was originally a string.
+ screen_obj["id"] = int(screen_obj["id"])
+ res = api.Screenboard.update(**screen_obj)
+ else:
+ res = api.Screenboard.create(**screen_obj)
+
+ if "errors" in res:
+ print_err("Upload of screenboard {0} from file {1} failed.".format(screen_obj["id"], f.name))
+
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ if args.format == "pretty":
+ print("Uploaded file {0} (screenboard {1})".format(f.name, screen_obj["id"]))
+
+ @classmethod
+    def _write_screen_to_file(cls, screenboard_id, filename, timeout, format="raw", string_ids=False):
+        api._timeout = timeout
+        with open(filename, "w") as f:
+ res = api.Screenboard.get(screenboard_id)
+ report_warnings(res)
+ report_errors(res)
+
+ screen_obj = res
+ if "resource" in screen_obj:
+ del screen_obj["resource"]
+ if "url" in screen_obj:
+ del screen_obj["url"]
+
+ if string_ids:
+ screen_obj["id"] = str(screen_obj["id"])
+
+ json.dump(screen_obj, f, indent=2)
+
+ if format == "pretty":
+ print("Downloaded screenboard {0} to file {1}".format(screenboard_id, filename))
+ else:
+ print("{0} {1}".format(screenboard_id, filename))
+
+ @classmethod
+ def _post(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ graphs = args.graphs
+ if args.graphs is None:
+ graphs = sys.stdin.read()
+ graphs = json.loads(graphs)
+ res = api.Screenboard.create(
+            board_title=args.title,
+            description=args.description,
+            widgets=[graphs],
+ template_variables=args.template_variables,
+ width=args.width,
+ height=args.height,
+ )
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _update(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ graphs = args.graphs
+ if args.graphs is None:
+ graphs = sys.stdin.read()
+ graphs = json.loads(graphs)
+
+ res = api.Screenboard.update(
+ args.screenboard_id,
+ board_title=args.title,
+ description=args.description,
+ widgets=graphs,
+ template_variables=args.template_variables,
+ width=args.width,
+ height=args.height,
+ )
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _web_view(cls, args):
+ dash_id = json.load(args.file)["id"]
+ url = api._api_host + "/dash/dash/{0}".format(dash_id)
+ webbrowser.open(url)
+
+ @classmethod
+ def _show(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Screenboard.get(args.screenboard_id)
+ report_warnings(res)
+ report_errors(res)
+
+ if args.string_ids:
+ res["id"] = str(res["id"])
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _delete(cls, args):
+ api._timeout = args.timeout
+ # TODO CHECK
+ res = api.Screenboard.delete(args.screenboard_id)
+ if res is not None:
+ report_warnings(res)
+ report_errors(res)
+
+ @classmethod
+ def _share(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Screenboard.share(args.screenboard_id)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _revoke(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Screenboard.revoke(args.screenboard_id)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _new_file(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ graphs = args.graphs
+ if args.graphs is None:
+ graphs = sys.stdin.read()
+ graphs = json.loads(graphs)
+ res = api.Screenboard.create(
+ board_title=args.filename, description="Description for {0}".format(args.filename), widgets=[graphs]
+ )
+ report_warnings(res)
+ report_errors(res)
+
+ cls._write_screen_to_file(res["id"], args.filename, args.timeout, format, args.string_ids)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+
+def _template_variables(tpl_var_input):
+ if "[" not in tpl_var_input:
+ return [v.strip() for v in tpl_var_input.split(",")]
+ else:
+ try:
+ return json.loads(tpl_var_input)
+ except Exception:
+ raise argparse.ArgumentTypeError("bad template_variable json parameter")
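
The `_template_variables` converter above lets `--template_variables` accept either a bare comma list of names or a full JSON list of dicts. Its two paths, exercised directly (error handling omitted from this mirror):

    import json

    def template_variables(tpl_var_input):
        # Comma list unless the input looks like a JSON array.
        if "[" not in tpl_var_input:
            return [v.strip() for v in tpl_var_input.split(",")]
        return json.loads(tpl_var_input)

    print(template_variables("host,env"))
    # ['host', 'env']
    print(template_variables('[{"name": "host", "prefix": "host"}]'))
    # [{'name': 'host', 'prefix': 'host'}]
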
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/search.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/search.py
new file mode 100644
index 0000000..9c1cb47
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/search.py
@@ -0,0 +1,43 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import json
+
+# datadog
+from datadog import api
+from datadog.dogshell.common import report_errors, report_warnings
+
+
+# TODO IS there a test ?
+class SearchClient(object):
+ @classmethod
+ def setup_parser(cls, subparsers):
+ parser = subparsers.add_parser("search", help="search datadog")
+ verb_parsers = parser.add_subparsers(title="Verbs", dest="verb")
+ verb_parsers.required = True
+
+ query_parser = verb_parsers.add_parser("query", help="Search datadog.")
+ query_parser.add_argument("query", help="optionally faceted search query")
+ query_parser.set_defaults(func=cls._query)
+
+ @classmethod
+ def _query(cls, args):
+        api._timeout = args.timeout
+        format = args.format
+ res = api.Infrastructure.search(q=args.query)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ for facet, results in list(res["results"].items()):
+ for idx, result in enumerate(results):
+ if idx == 0:
+ print("\n")
+ print("%s\t%s" % (facet, result))
+ else:
+ print("%s\t%s" % (" " * len(facet), result))
+ elif format == "raw":
+ print(json.dumps(res))
+ else:
+ for facet, results in list(res["results"].items()):
+ for result in results:
+ print("%s\t%s" % (facet, result))
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/service_check.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/service_check.py
new file mode 100644
index 0000000..b30f33c
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/service_check.py
@@ -0,0 +1,55 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import json
+
+# 3p
+from datadog.util.format import pretty_json
+
+# datadog
+from datadog import api
+from datadog.dogshell.common import report_errors, report_warnings
+
+
+class ServiceCheckClient(object):
+ @classmethod
+ def setup_parser(cls, subparsers):
+ parser = subparsers.add_parser("service_check", help="Perform service checks")
+ verb_parsers = parser.add_subparsers(title="Verbs", dest="verb")
+ verb_parsers.required = True
+
+ check_parser = verb_parsers.add_parser("check", help="text for the message")
+ check_parser.add_argument("check", help="text for the message")
+ check_parser.add_argument("host_name", help="name of the host submitting the check")
+ check_parser.add_argument(
+ "status",
+ help="integer for the status of the check." " i.e: '0': OK, '1': WARNING, '2': CRITICAL, '3': UNKNOWN",
+ )
+ check_parser.add_argument("--timestamp", help="POSIX timestamp of the event", default=None)
+ check_parser.add_argument("--message", help="description of why this status occurred", default=None)
+ check_parser.add_argument("--tags", help="comma separated list of tags", default=None)
+ check_parser.set_defaults(func=cls._check)
+
+ @classmethod
+ def _check(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ if args.tags:
+ tags = sorted(set([t.strip() for t in args.tags.split(",") if t.strip()]))
+ else:
+ tags = None
+ res = api.ServiceCheck.check(
+ check=args.check,
+ host_name=args.host_name,
+ status=int(args.status),
+ timestamp=args.timestamp,
+ message=args.message,
+ tags=tags,
+ )
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
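
The positional `status` above is coerced to an integer before submission, so scripts usually keep the 0-3 mapping next to the call. A sketch using the same `api.ServiceCheck.check` call that `_check` wraps (check name, host, and tags are illustrative):

    from datadog import api, initialize

    initialize(api_key="<API_KEY>", app_key="<APP_KEY>")

    OK, WARNING, CRITICAL, UNKNOWN = range(4)  # 0..3, as documented above

    res = api.ServiceCheck.check(
        check="app.can_connect",
        host_name="web01",
        status=CRITICAL,
        message="connection refused",
        tags=["env:prod"],
    )
    print(res)
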
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/service_level_objective.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/service_level_objective.py
new file mode 100644
index 0000000..13ec928
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/service_level_objective.py
@@ -0,0 +1,426 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import argparse
+import json
+
+# 3p
+from datadog.util.cli import (
+ set_of_ints,
+ comma_set,
+ comma_list_or_empty,
+ parse_date_as_epoch_timestamp,
+)
+from datadog.util.format import pretty_json
+
+# datadog
+from datadog import api
+from datadog.dogshell.common import report_errors, report_warnings
+
+
+class ServiceLevelObjectiveClient(object):
+ @classmethod
+ def setup_parser(cls, subparsers):
+ parser = subparsers.add_parser(
+ "service_level_objective",
+ help="Create, edit, and delete service level objectives",
+ )
+
+ verb_parsers = parser.add_subparsers(title="Verbs", dest="verb")
+ verb_parsers.required = True
+
+ create_parser = verb_parsers.add_parser("create", help="Create a SLO")
+ create_parser.add_argument(
+ "--type",
+ help="type of the SLO, e.g.",
+ choices=["metric", "monitor"],
+ )
+ create_parser.add_argument("--name", help="name of the SLO", default=None)
+ create_parser.add_argument("--description", help="description of the SLO", default=None)
+ create_parser.add_argument(
+ "--tags",
+ help="comma-separated list of tags",
+ default=None,
+ type=comma_list_or_empty,
+ )
+ create_parser.add_argument(
+ "--thresholds",
+ help="comma separated list of :[:[:[:]]",
+ )
+ create_parser.add_argument(
+ "--numerator",
+ help="numerator metric query (sum of good events)",
+ default=None,
+ )
+ create_parser.add_argument(
+ "--denominator",
+ help="denominator metric query (sum of total events)",
+ default=None,
+ )
+ create_parser.add_argument(
+ "--monitor_ids",
+ help="explicit monitor_ids to use (CSV)",
+ default=None,
+ type=set_of_ints,
+ )
+ create_parser.add_argument("--monitor_search", help="monitor search terms to use", default=None)
+ create_parser.add_argument(
+ "--groups",
+ help="for a single monitor you can specify the specific groups as a pipe (|) delimited string",
+ default=None,
+ type=comma_list_or_empty,
+ )
+ create_parser.set_defaults(func=cls._create)
+
+ file_create_parser = verb_parsers.add_parser("fcreate", help="Create a SLO from file")
+ file_create_parser.add_argument("file", help="json file holding all details", type=argparse.FileType("r"))
+ file_create_parser.set_defaults(func=cls._file_create)
+
+ update_parser = verb_parsers.add_parser("update", help="Update existing SLO")
+ update_parser.add_argument("slo_id", help="SLO to replace with the new definition")
+ update_parser.add_argument(
+ "--type",
+ help="type of the SLO (must specify it's original type)",
+ choices=["metric", "monitor"],
+ )
+ update_parser.add_argument("--name", help="name of the SLO", default=None)
+ update_parser.add_argument("--description", help="description of the SLO", default=None)
+ update_parser.add_argument(
+ "--thresholds",
+ help="comma separated list of :[:[:[:]]",
+ )
+ update_parser.add_argument(
+ "--tags",
+ help="comma-separated list of tags",
+ default=None,
+ type=comma_list_or_empty,
+ )
+ update_parser.add_argument(
+ "--numerator",
+ help="numerator metric query (sum of good events)",
+ default=None,
+ )
+ update_parser.add_argument(
+ "--denominator",
+ help="denominator metric query (sum of total events)",
+ default=None,
+ )
+ update_parser.add_argument(
+ "--monitor_ids",
+ help="explicit monitor_ids to use (CSV)",
+ default=[],
+            type=set_of_ints,
+ )
+ update_parser.add_argument("--monitor_search", help="monitor search terms to use", default=None)
+ update_parser.add_argument(
+ "--groups",
+ help="for a single monitor you can specify the specific groups as a pipe (|) delimited string",
+ default=None,
+ )
+ update_parser.set_defaults(func=cls._update)
+
+ file_update_parser = verb_parsers.add_parser("fupdate", help="Update existing SLO from file")
+ file_update_parser.add_argument("file", help="json file holding all details", type=argparse.FileType("r"))
+ file_update_parser.set_defaults(func=cls._file_update)
+
+ show_parser = verb_parsers.add_parser("show", help="Show a SLO definition")
+ show_parser.add_argument("slo_id", help="SLO to show")
+ show_parser.set_defaults(func=cls._show)
+
+ show_all_parser = verb_parsers.add_parser("show_all", help="Show a list of all SLOs")
+ show_all_parser.add_argument("--query", help="string to filter SLOs by query (see UI or documentation)")
+ show_all_parser.add_argument(
+ "--slo_ids",
+ help="comma separated list indicating what SLO IDs to get at once",
+ type=comma_set,
+ )
+ show_all_parser.add_argument("--offset", help="offset of query pagination", default=0)
+ show_all_parser.add_argument("--limit", help="limit of query pagination", default=100)
+ show_all_parser.set_defaults(func=cls._show_all)
+
+ delete_parser = verb_parsers.add_parser("delete", help="Delete a SLO")
+ delete_parser.add_argument("slo_id", help="SLO to delete")
+ delete_parser.set_defaults(func=cls._delete)
+
+ delete_many_parser = verb_parsers.add_parser("delete_many", help="Delete a SLO")
+ delete_many_parser.add_argument("slo_ids", help="comma separated list of SLO IDs to delete", type=comma_set)
+ delete_many_parser.set_defaults(func=cls._delete_many)
+
+ delete_timeframe_parser = verb_parsers.add_parser("delete_many_timeframe", help="Delete a SLO timeframe")
+ delete_timeframe_parser.add_argument("slo_id", help="SLO ID to update")
+ delete_timeframe_parser.add_argument(
+ "timeframes",
+ help="CSV of timeframes to delete, e.g. 7d,30d,90d",
+ type=comma_set,
+ )
+ delete_timeframe_parser.set_defaults(func=cls._delete_timeframe)
+
+ can_delete_parser = verb_parsers.add_parser("can_delete", help="Check if can delete SLOs")
+ can_delete_parser.add_argument("slo_ids", help="comma separated list of SLO IDs to delete", type=comma_set)
+ can_delete_parser.set_defaults(func=cls._can_delete)
+
+ history_parser = verb_parsers.add_parser("history", help="Get the SLO history")
+ history_parser.add_argument("slo_id", help="SLO to query the history")
+ history_parser.add_argument(
+ "from_ts",
+ type=parse_date_as_epoch_timestamp,
+ help="`from` date or timestamp",
+ )
+ history_parser.add_argument(
+ "to_ts",
+ type=parse_date_as_epoch_timestamp,
+ help="`to` date or timestamp",
+ )
+ history_parser.set_defaults(func=cls._history)
+
+ @classmethod
+ def _create(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+
+ params = {"type": args.type, "name": args.name}
+
+ if args.tags:
+ tags = sorted(set([t.strip() for t in args.tags.split(",") if t.strip()]))
+ params["tags"] = tags
+
+ thresholds = []
+ for threshold_str in args.thresholds.split(","):
+ parts = threshold_str.split(":")
+ timeframe = parts[0]
+ target = float(parts[1])
+
+ threshold = {"timeframe": timeframe, "target": target}
+
+ if len(parts) > 2:
+ threshold["warning"] = float(parts[2])
+
+ if len(parts) > 3 and parts[3]:
+ threshold["target_display"] = parts[3]
+
+ if len(parts) > 4 and parts[4]:
+ threshold["warning_display"] = parts[4]
+
+ thresholds.append(threshold)
+ params["thresholds"] = thresholds
+
+ if args.description:
+ params["description"] = args.description
+
+ if args.type == "metric":
+ params["query"] = {
+ "numerator": args.numerator,
+ "denominator": args.denominator,
+ }
+ elif args.monitor_search:
+ params["monitor_search"] = args.monitor_search
+ else:
+ params["monitor_ids"] = list(args.monitor_ids)
+ if args.groups and len(args.monitor_ids) == 1:
+ groups = args.groups.split("|")
+ params["groups"] = groups
+
+ res = api.ServiceLevelObjective.create(**params)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _file_create(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ slo = json.load(args.file)
+ res = api.ServiceLevelObjective.create(return_raw=True, **slo)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _update(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+
+ params = {"type": args.type}
+
+ if args.thresholds:
+ thresholds = []
+ for threshold_str in args.thresholds.split(","):
+ parts = threshold_str.split(":")
+ timeframe = parts[0]
+                target = float(parts[1])
+
+ threshold = {"timeframe": timeframe, "target": target}
+
+ if len(parts) > 2:
+ threshold["warning"] = float(parts[2])
+
+ if len(parts) > 3 and parts[3]:
+ threshold["target_display"] = parts[3]
+
+ if len(parts) > 4 and parts[4]:
+ threshold["warning_display"] = parts[4]
+
+ thresholds.append(threshold)
+ params["thresholds"] = thresholds
+
+ if args.description:
+ params["description"] = args.description
+
+ if args.type == "metric":
+ if args.numerator and args.denominator:
+ params["query"] = {
+ "numerator": args.numerator,
+ "denominator": args.denominator,
+ }
+ elif args.monitor_search:
+ params["monitor_search"] = args.monitor_search
+ else:
+ params["monitor_ids"] = args.monitor_ids
+ if args.groups and len(args.monitor_ids) == 1:
+ groups = args.groups.split("|")
+ params["groups"] = groups
+
+ if args.tags:
+ tags = sorted(set([t.strip() for t in args.tags if t.strip()]))
+ params["tags"] = tags
+ res = api.ServiceLevelObjective.update(args.slo_id, return_raw=True, **params)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _file_update(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ slo = json.load(args.file)
+
+ res = api.ServiceLevelObjective.update(slo["id"], return_raw=True, **slo)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _show(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.ServiceLevelObjective.get(args.slo_id, return_raw=True)
+ report_warnings(res)
+ report_errors(res)
+
+ if args.string_ids:
+ res["id"] = str(res["id"])
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _show_all(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+
+ params = {"offset": args.offset, "limit": args.limit}
+ if args.query:
+ params["query"] = args.query
+ else:
+ params["ids"] = args.slo_ids
+
+ res = api.ServiceLevelObjective.get_all(return_raw=True, **params)
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _delete(cls, args):
+        api._timeout = args.timeout
+        format = args.format
+ res = api.ServiceLevelObjective.delete(args.slo_id, return_raw=True)
+ if res is not None:
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _delete_many(cls, args):
+        api._timeout = args.timeout
+        format = args.format
+ res = api.ServiceLevelObjective.delete_many(args.slo_ids)
+ if res is not None:
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _delete_timeframe(cls, args):
+        api._timeout = args.timeout
+        format = args.format
+
+ ops = {args.slo_id: args.timeframes}
+
+ res = api.ServiceLevelObjective.bulk_delete(ops)
+ if res is not None:
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _can_delete(cls, args):
+        api._timeout = args.timeout
+        format = args.format
+
+ res = api.ServiceLevelObjective.can_delete(args.slo_ids)
+ if res is not None:
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _history(cls, args):
+        api._timeout = args.timeout
+        format = args.format
+
+        res = api.ServiceLevelObjective.history(args.slo_id, from_ts=args.from_ts, to_ts=args.to_ts)
+ if res is not None:
+ report_warnings(res)
+ report_errors(res)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _escape(cls, s):
+ return s.replace("\r", "\\r").replace("\n", "\\n").replace("\t", "\\t")
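
The `--thresholds` flag above packs up to five fields per threshold into one colon-delimited token, comma-separated per timeframe. A standalone mirror of the parsing loop in `_create`:

    def parse_thresholds(spec):
        # <timeframe>:<target>[:<warning>[:<target_display>[:<warning_display>]]]
        thresholds = []
        for token in spec.split(","):
            parts = token.split(":")
            threshold = {"timeframe": parts[0], "target": float(parts[1])}
            if len(parts) > 2:
                threshold["warning"] = float(parts[2])
            if len(parts) > 3 and parts[3]:
                threshold["target_display"] = parts[3]
            if len(parts) > 4 and parts[4]:
                threshold["warning_display"] = parts[4]
            thresholds.append(threshold)
        return thresholds

    print(parse_thresholds("7d:99.9:99.95,30d:99.5"))
    # [{'timeframe': '7d', 'target': 99.9, 'warning': 99.95},
    #  {'timeframe': '30d', 'target': 99.5}]
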
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/tag.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/tag.py
new file mode 100644
index 0000000..3d4d2b9
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/tag.py
@@ -0,0 +1,120 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import json
+
+# datadog
+from datadog import api
+from datadog.dogshell.common import report_errors, report_warnings
+
+
+class TagClient(object):
+ @classmethod
+ def setup_parser(cls, subparsers):
+ parser = subparsers.add_parser("tag", help="View and modify host tags.")
+ verb_parsers = parser.add_subparsers(title="Verbs", dest="verb")
+ verb_parsers.required = True
+
+ add_parser = verb_parsers.add_parser(
+ "add", help="Add a host to one or more tags.", description="Hosts can be specified by name or id."
+ )
+ add_parser.add_argument("host", help="host to add")
+ add_parser.add_argument("tag", help="tag to add host to (one or more, space separated)", nargs="+")
+ add_parser.set_defaults(func=cls._add)
+
+ replace_parser = verb_parsers.add_parser(
+ "replace",
+ help="Replace all tags with one or more new tags.",
+ description="Hosts can be specified by name or id.",
+ )
+ replace_parser.add_argument("host", help="host to modify")
+ replace_parser.add_argument("tag", help="list of tags to add host to", nargs="+")
+ replace_parser.set_defaults(func=cls._replace)
+
+ show_parser = verb_parsers.add_parser(
+ "show", help="Show host tags.", description="Hosts can be specified by name or id."
+ )
+ show_parser.add_argument("host", help="host to show (or 'all' to show all tags)")
+ show_parser.set_defaults(func=cls._show)
+
+ detach_parser = verb_parsers.add_parser(
+ "detach", help="Remove a host from all tags.", description="Hosts can be specified by name or id."
+ )
+ detach_parser.add_argument("host", help="host to detach")
+ detach_parser.set_defaults(func=cls._detach)
+
+ @classmethod
+ def _add(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Tag.create(args.host, tags=args.tag)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print("Tags for '%s':" % res["host"])
+ for c in res["tags"]:
+ print(" " + c)
+ elif format == "raw":
+ print(json.dumps(res))
+ else:
+ for c in res["tags"]:
+ print(c)
+
+ @classmethod
+ def _replace(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Tag.update(args.host, tags=args.tag)
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print("Tags for '%s':" % res["host"])
+ for c in res["tags"]:
+ print(" " + c)
+ elif format == "raw":
+ print(json.dumps(res))
+ else:
+ for c in res["tags"]:
+ print(c)
+
+ @classmethod
+ def _show(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ if args.host == "all":
+ res = api.Tag.get_all()
+ else:
+ res = api.Tag.get(args.host)
+ report_warnings(res)
+ report_errors(res)
+ if args.host == "all":
+ if format == "pretty":
+ for tag, hosts in list(res["tags"].items()):
+ for host in hosts:
+ print(tag)
+ print(" " + host)
+ print()
+ elif format == "raw":
+ print(json.dumps(res))
+ else:
+ for tag, hosts in list(res["tags"].items()):
+ for host in hosts:
+ print(tag + "\t" + host)
+ else:
+ if format == "pretty":
+ for tag in res["tags"]:
+ print(tag)
+ elif format == "raw":
+ print(json.dumps(res))
+ else:
+ for tag in res["tags"]:
+ print(tag)
+
+ @classmethod
+ def _detach(cls, args):
+ api._timeout = args.timeout
+ res = api.Tag.delete(args.host)
+ if res is not None:
+ report_warnings(res)
+ report_errors(res)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/timeboard.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/timeboard.py
new file mode 100644
index 0000000..477a1b6
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/timeboard.py
@@ -0,0 +1,358 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import json
+import os.path
+import platform
+import sys
+import webbrowser
+
+# 3p
+import argparse
+
+# datadog
+from datadog import api
+from datadog.dogshell.common import report_errors, report_warnings, print_err
+from datadog.util.format import pretty_json
+from datetime import datetime
+
+
+class TimeboardClient(object):
+ @classmethod
+ def setup_parser(cls, subparsers):
+ parser = subparsers.add_parser("timeboard", help="Create, edit, and delete timeboards")
+ parser.add_argument(
+ "--string_ids",
+ action="store_true",
+ dest="string_ids",
+ help="Represent timeboard IDs as strings instead of ints in JSON",
+ )
+
+ verb_parsers = parser.add_subparsers(title="Verbs", dest="verb")
+ verb_parsers.required = True
+
+ post_parser = verb_parsers.add_parser("post", help="Create timeboards")
+ post_parser.add_argument("title", help="title for the new timeboard")
+ post_parser.add_argument("description", help="short description of the timeboard")
+ post_parser.add_argument(
+ "graphs", help="graph definitions as a JSON string. if unset," " reads from stdin.", nargs="?"
+ )
+ post_parser.add_argument(
+ "--template_variables",
+ type=_template_variables,
+ default=[],
+ help="a json list of template variable dicts, e.g. "
+ "[{'name': 'host', 'prefix': 'host', "
+ "'default': 'host:my-host'}]'",
+ )
+
+ post_parser.set_defaults(func=cls._post)
+
+ update_parser = verb_parsers.add_parser("update", help="Update existing timeboards")
+ update_parser.add_argument("timeboard_id", help="timeboard to replace" " with the new definition")
+ update_parser.add_argument("title", help="new title for the timeboard")
+ update_parser.add_argument("description", help="short description of the timeboard")
+ update_parser.add_argument(
+ "graphs", help="graph definitions as a JSON string." " if unset, reads from stdin", nargs="?"
+ )
+ update_parser.add_argument(
+ "--template_variables",
+ type=_template_variables,
+ default=[],
+ help="a json list of template variable dicts, e.g. "
+ "[{'name': 'host', 'prefix': 'host', "
+ "'default': 'host:my-host'}]'",
+ )
+ update_parser.set_defaults(func=cls._update)
+
+ show_parser = verb_parsers.add_parser("show", help="Show a timeboard definition")
+ show_parser.add_argument("timeboard_id", help="timeboard to show")
+ show_parser.set_defaults(func=cls._show)
+
+ show_all_parser = verb_parsers.add_parser("show_all", help="Show a list of all timeboards")
+ show_all_parser.set_defaults(func=cls._show_all)
+
+ pull_parser = verb_parsers.add_parser("pull", help="Pull a timeboard on the server" " into a local file")
+ pull_parser.add_argument("timeboard_id", help="ID of timeboard to pull")
+ pull_parser.add_argument("filename", help="file to pull timeboard into")
+ pull_parser.set_defaults(func=cls._pull)
+
+ pull_all_parser = verb_parsers.add_parser("pull_all", help="Pull all timeboards" " into files in a directory")
+ pull_all_parser.add_argument("pull_dir", help="directory to pull timeboards into")
+ pull_all_parser.set_defaults(func=cls._pull_all)
+
+ push_parser = verb_parsers.add_parser(
+ "push", help="Push updates to timeboards" " from local files to the server"
+ )
+ push_parser.add_argument(
+ "--append_auto_text",
+ action="store_true",
+ dest="append_auto_text",
+ help="When pushing to the server, appends filename"
+ " and timestamp to the end of the timeboard description",
+ )
+ push_parser.add_argument(
+ "file", help="timeboard files to push to the server", nargs="+", type=argparse.FileType("r")
+ )
+ push_parser.set_defaults(func=cls._push)
+
+ new_file_parser = verb_parsers.add_parser(
+ "new_file", help="Create a new timeboard" " and put its contents in a file"
+ )
+ new_file_parser.add_argument("filename", help="name of file to create with empty timeboard")
+ new_file_parser.add_argument(
+ "graphs", help="graph definitions as a JSON string." " if unset, reads from stdin.", nargs="?"
+ )
+ new_file_parser.set_defaults(func=cls._new_file)
+
+ web_view_parser = verb_parsers.add_parser("web_view", help="View the timeboard in a web browser")
+ web_view_parser.add_argument("file", help="timeboard file", type=argparse.FileType("r"))
+ web_view_parser.set_defaults(func=cls._web_view)
+
+ delete_parser = verb_parsers.add_parser("delete", help="Delete timeboards")
+ delete_parser.add_argument("timeboard_id", help="timeboard to delete")
+ delete_parser.set_defaults(func=cls._delete)
+
+ @classmethod
+ def _pull(cls, args):
+ cls._write_dash_to_file(args.timeboard_id, args.filename, args.timeout, args.format, args.string_ids)
+
+ @classmethod
+ def _pull_all(cls, args):
+ api._timeout = args.timeout
+
+ def _title_to_filename(title):
+ # Get a lowercased version with most punctuation stripped out...
+ no_punct = "".join([c for c in title.lower() if c.isalnum() or c in [" ", "_", "-"]])
+ # Now replace all -'s, _'s and spaces with "_", and strip trailing _
+ return no_punct.replace(" ", "_").replace("-", "_").strip("_")
+
+ format = args.format
+ res = api.Timeboard.get_all()
+ report_warnings(res)
+ report_errors(res)
+
+ if not os.path.exists(args.pull_dir):
+ os.mkdir(args.pull_dir, 0o755)
+
+ used_filenames = set()
+ for dash_summary in res["dashes"]:
+ filename = _title_to_filename(dash_summary["title"])
+ if filename in used_filenames:
+ filename = filename + "-" + dash_summary["id"]
+ used_filenames.add(filename)
+
+ cls._write_dash_to_file(
+ dash_summary["id"],
+ os.path.join(args.pull_dir, filename + ".json"),
+ args.timeout,
+ format,
+ args.string_ids,
+ )
+ if format == "pretty":
+ print(
+ ("\n### Total: {0} dashboards to {1} ###".format(len(used_filenames), os.path.realpath(args.pull_dir)))
+ )
+
+ @classmethod
+ def _new_file(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ graphs = args.graphs
+ if args.graphs is None:
+ graphs = sys.stdin.read()
+ graphs = json.loads(graphs)
+ res = api.Timeboard.create(
+ title=args.filename, description="Description for {0}".format(args.filename), graphs=[graphs]
+ )
+
+ report_warnings(res)
+ report_errors(res)
+
+ cls._write_dash_to_file(res["dash"]["id"], args.filename, args.timeout, format, args.string_ids)
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _write_dash_to_file(cls, dash_id, filename, timeout, format="raw", string_ids=False):
+ with open(filename, "w") as f:
+ res = api.Timeboard.get(dash_id)
+ report_warnings(res)
+ report_errors(res)
+
+ dash_obj = res["dash"]
+ if "resource" in dash_obj:
+ del dash_obj["resource"]
+ if "url" in dash_obj:
+ del dash_obj["url"]
+
+ if string_ids:
+ dash_obj["id"] = str(dash_obj["id"])
+
+ if not dash_obj.get("template_variables"):
+ dash_obj.pop("template_variables", None)
+
+ json.dump(dash_obj, f, indent=2)
+
+ if format == "pretty":
+ print(u"Downloaded dashboard {0} to file {1}".format(dash_id, filename))
+ else:
+ print(u"{0} {1}".format(dash_id, filename))
+
+ @classmethod
+ def _push(cls, args):
+ api._timeout = args.timeout
+ for f in args.file:
+ try:
+ dash_obj = json.load(f)
+ except Exception as err:
+ raise Exception("Could not parse {0}: {1}".format(f.name, err))
+
+ if args.append_auto_text:
+ datetime_str = datetime.now().strftime("%x %X")
+                auto_text = "<br/>\nUpdated at {0} from {1} ({2}) on {3}".format(
+ datetime_str, f.name, dash_obj["id"], platform.node()
+ )
+ dash_obj["description"] += auto_text
+ tpl_vars = dash_obj.get("template_variables", [])
+
+ if "id" in dash_obj:
+ # Always convert to int, in case it was originally a string.
+ dash_obj["id"] = int(dash_obj["id"])
+ res = api.Timeboard.update(
+ dash_obj["id"],
+ title=dash_obj["title"],
+ description=dash_obj["description"],
+ graphs=dash_obj["graphs"],
+ template_variables=tpl_vars,
+ )
+ else:
+ res = api.Timeboard.create(
+ title=dash_obj["title"],
+ description=dash_obj["description"],
+ graphs=dash_obj["graphs"],
+ template_variables=tpl_vars,
+ )
+
+ if "errors" in res:
+ print_err("Upload of dashboard {0} from file {1} failed.".format(dash_obj["id"], f.name))
+
+ report_warnings(res)
+ report_errors(res)
+
+            if args.format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ if args.format == "pretty":
+ print("Uploaded file {0} (dashboard {1})".format(f.name, dash_obj["id"]))
+
+ @classmethod
+ def _post(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ graphs = args.graphs
+ if args.graphs is None:
+ graphs = sys.stdin.read()
+ graphs = json.loads(graphs)
+ res = api.Timeboard.create(
+ title=args.title, description=args.description, graphs=[graphs], template_variables=args.template_variables
+ )
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _update(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ graphs = args.graphs
+ if args.graphs is None:
+ graphs = sys.stdin.read()
+ graphs = json.loads(graphs)
+
+ res = api.Timeboard.update(
+ args.timeboard_id,
+ title=args.title,
+ description=args.description,
+ graphs=graphs,
+ template_variables=args.template_variables,
+ )
+ report_warnings(res)
+ report_errors(res)
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _show(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Timeboard.get(args.timeboard_id)
+ report_warnings(res)
+ report_errors(res)
+
+ if args.string_ids:
+ res["dash"]["id"] = str(res["dash"]["id"])
+
+ if format == "pretty":
+ print(pretty_json(res))
+ else:
+ print(json.dumps(res))
+
+ @classmethod
+ def _show_all(cls, args):
+ api._timeout = args.timeout
+ format = args.format
+ res = api.Timeboard.get_all()
+ report_warnings(res)
+ report_errors(res)
+
+ if args.string_ids:
+ for d in res["dashes"]:
+ d["id"] = str(d["id"])
+
+ if format == "pretty":
+ print(pretty_json(res))
+ elif format == "raw":
+ print(json.dumps(res))
+ else:
+ for d in res["dashes"]:
+ print("\t".join([(d["id"]), (d["resource"]), (d["title"]), cls._escape(d["description"])]))
+
+ @classmethod
+ def _delete(cls, args):
+ api._timeout = args.timeout
+ res = api.Timeboard.delete(args.timeboard_id)
+ if res is not None:
+ report_warnings(res)
+ report_errors(res)
+
+ @classmethod
+ def _web_view(cls, args):
+ dash_id = json.load(args.file)["id"]
+ url = api._api_host + "/dash/dash/{0}".format(dash_id)
+ webbrowser.open(url)
+
+ @classmethod
+ def _escape(cls, s):
+ return s.replace("\r", "\\r").replace("\n", "\\n").replace("\t", "\\t") if s else ""
+
+
+def _template_variables(tpl_var_input):
+ if "[" not in tpl_var_input:
+ return [v.strip() for v in tpl_var_input.split(",")]
+ else:
+ try:
+ return json.loads(tpl_var_input)
+ except Exception:
+ raise argparse.ArgumentTypeError("bad template_variable json parameter")
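+
+
+# --- Editor's illustrative sketch (not part of the upstream module) ---------
+# _template_variables accepts either a bare comma-separated string of variable
+# names or a JSON list of template-variable dicts; a guarded self-check:
+if __name__ == "__main__":  # pragma: no cover - documentation sketch
+    assert _template_variables("host, env") == ["host", "env"]
+    assert _template_variables('[{"name": "host", "prefix": "host"}]') == [
+        {"name": "host", "prefix": "host"}
+    ]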
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/wrap.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/wrap.py
new file mode 100644
index 0000000..25df6d9
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogshell/wrap.py
@@ -0,0 +1,520 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+"""
+
+Wraps shell commands and sends the result to Datadog as events. Ex:
+
+dogwrap -n test-job -k $API_KEY --submit_mode all "ls -lah"
+
+Note that you need to enclose your command in quotes to prevent python
+from thinking the command line arguments belong to the python command
+instead of the wrapped command.
+
+You can also have the script only send events if they fail:
+
+dogwrap -n test-job -k $API_KEY --submit_mode errors "ls -lah"
+
+And you can give the command a timeout too:
+
+dogwrap -n test-job -k $API_KEY --timeout=1 "sleep 3"
+
+"""
+# stdlib
+from __future__ import print_function
+
+import os
+from copy import copy
+import optparse
+import subprocess
+import sys
+import threading
+import time
+import warnings
+
+# datadog
+from datadog import initialize, api, __version__
+from datadog.util.compat import is_p3k
+
+
+SUCCESS = "success"
+ERROR = "error"
+WARNING = "warning"
+
+MAX_EVENT_BODY_LENGTH = 3000
+
+
+class Timeout(Exception):
+ pass
+
+
+class OutputReader(threading.Thread):
+ """
+ Thread collecting the output of a subprocess, optionally forwarding it to
+ a given file descriptor and storing it for further retrieval.
+ """
+
+ def __init__(self, proc_out, fwd_out=None):
+ """
+ Instantiates an OutputReader.
+ :param proc_out: the output to read
+ :type proc_out: file descriptor
+ :param fwd_out: the output to forward to (None to disable forwarding)
+ :type fwd_out: file descriptor or None
+ """
+ threading.Thread.__init__(self)
+ self.daemon = True
+ self._out_content = b""
+ self._out = proc_out
+ self._fwd_out = fwd_out
+
+ def run(self):
+ """
+        Thread's main loop: collects the output, optionally forwarding it to
+ the file descriptor passed in the constructor.
+ """
+ for line in iter(self._out.readline, b""):
+ if self._fwd_out is not None:
+ self._fwd_out.write(line)
+ self._out_content += line
+ self._out.close()
+
+ @property
+ def content(self):
+ """
+ The content stored in out so far. (Not threadsafe, wait with .join())
+ """
+ return self._out_content
+
+
+def poll_proc(proc, sleep_interval, timeout):
+ """
+ Polls the process until it returns or a given timeout has been reached
+ """
+ start_time = time.time()
+ returncode = None
+ while returncode is None:
+ returncode = proc.poll()
+ if time.time() - start_time > timeout:
+ raise Timeout()
+ else:
+ time.sleep(sleep_interval)
+ return returncode
+
+
+def execute(cmd, cmd_timeout, sigterm_timeout, sigkill_timeout, proc_poll_interval, buffer_outs):
+ """
+ Launches the process and monitors its outputs
+ """
+ start_time = time.time()
+ returncode = -1
+ stdout = b""
+ stderr = b""
+ try:
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
+ except Exception:
+ print(u"Failed to execute %s" % (repr(cmd)), file=sys.stderr)
+ raise
+ try:
+        # Start the threads collecting the output from the command in the
+        # background
+ stdout_buffer = sys.stdout.buffer if is_p3k() else sys.stdout
+ stderr_buffer = sys.stderr.buffer if is_p3k() else sys.stderr
+ out_reader = OutputReader(proc.stdout, stdout_buffer if not buffer_outs else None)
+ err_reader = OutputReader(proc.stderr, stderr_buffer if not buffer_outs else None)
+ out_reader.start()
+ err_reader.start()
+
+        # Quietly wait for the program's completion here to get the exit
+        # code when it finishes
+ returncode = poll_proc(proc, proc_poll_interval, cmd_timeout)
+ except Timeout:
+ returncode = Timeout
+ sigterm_start = time.time()
+ print("Command timed out after %.2fs, killing with SIGTERM" % (time.time() - start_time), file=sys.stderr)
+ try:
+ proc.terminate()
+ try:
+ poll_proc(proc, proc_poll_interval, sigterm_timeout)
+ except Timeout:
+ print(
+ "SIGTERM timeout failed after %.2fs, killing with SIGKILL" % (time.time() - sigterm_start),
+ file=sys.stderr,
+ )
+ sigkill_start = time.time()
+ proc.kill()
+ try:
+ poll_proc(proc, proc_poll_interval, sigkill_timeout)
+ except Timeout:
+ print(
+ "SIGKILL timeout failed after %.2fs, exiting" % (time.time() - sigkill_start), file=sys.stderr
+ )
+ except OSError as e:
+ # Ignore OSError 3: no process found.
+ if e.errno != 3:
+ raise
+
+ # Let's harvest the outputs collected by our background threads
+ # after making sure they're done reading it.
+ out_reader.join()
+ err_reader.join()
+ stdout = out_reader.content
+ stderr = err_reader.content
+
+ duration = time.time() - start_time
+
+ return returncode, stdout, stderr, duration
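+
+# Editor's note (illustrative, not upstream code): execute() returns the exit
+# code, the captured stdout/stderr as bytes, and the wall-clock duration.
+# Assuming a POSIX shell:
+#
+#     rc, out, err, secs = execute("echo hi", 10, 2, 2, 0.1, True)
+#     # rc == 0, out == b"hi\n", err == b""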
+
+
+def trim_text(text, max_len):
+ """
+ Trim input text to fit the `max_len` condition.
+
+ If trim is needed: keep the first 1/3rd of the budget on the top,
+ and the other 2 thirds on the bottom.
+ """
+ if len(text) <= max_len:
+ return text
+
+ trimmed_text = (
+ u"{top_third}\n"
+ u"```\n"
+ u"*...trimmed...*\n"
+ u"```\n"
+ u"{bottom_two_third}\n".format(
+ top_third=text[: max_len // 3], bottom_two_third=text[len(text) - (2 * max_len) // 3 :]
+ )
+ )
+
+ return trimmed_text
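+
+# Editor's note (illustrative, not upstream code): trim_text() keeps the first
+# third of the budget from the top and the last two thirds from the bottom,
+# e.g. trim_text("a" * 50 + "b" * 50, 30) keeps "a" * 10 at the top and
+# "b" * 20 at the bottom, separated by the "*...trimmed...*" marker.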
+
+
+def build_event_body(cmd, returncode, stdout, stderr, notifications):
+ """
+ Format and return an event body.
+
+ Note: do not exceed MAX_EVENT_BODY_LENGTH length.
+ """
+ fmt_stdout = u""
+ fmt_stderr = u""
+ fmt_notifications = u""
+
+ max_length = MAX_EVENT_BODY_LENGTH // 2 if stdout and stderr else MAX_EVENT_BODY_LENGTH
+
+ if stdout:
+ fmt_stdout = u"**>>>> STDOUT <<<<**\n```\n{stdout} \n```\n".format(
+ stdout=trim_text(stdout.decode("utf-8", "replace"), max_length)
+ )
+
+ if stderr:
+ fmt_stderr = u"**>>>> STDERR <<<<**\n```\n{stderr} \n```\n".format(
+ stderr=trim_text(stderr.decode("utf-8", "replace"), max_length)
+ )
+
+ if notifications:
+ notifications = notifications.decode("utf-8", "replace") if isinstance(notifications, bytes) else notifications
+ fmt_notifications = u"**>>>> NOTIFICATIONS <<<<**\n\n {notifications}\n".format(notifications=notifications)
+
+ return (
+ u"%%%\n"
+ u"**>>>> CMD <<<<**\n```\n{command} \n```\n"
+ u"**>>>> EXIT CODE <<<<**\n\n {returncode}\n\n\n"
+ u"{stdout}"
+ u"{stderr}"
+ u"{notifications}"
+ u"%%%\n".format(
+ command=cmd,
+ returncode=returncode,
+ stdout=fmt_stdout,
+ stderr=fmt_stderr,
+ notifications=fmt_notifications,
+ )
+ )
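+
+# Editor's note (illustrative, not upstream code): the body is Datadog event
+# markdown wrapped in %%% markers, e.g.
+#
+#     build_event_body("ls", 0, b"out", b"", "")
+#
+# starts with "%%%\n**>>>> CMD <<<<**", then the exit code section, then a
+# fenced STDOUT section; STDERR and NOTIFICATIONS sections appear only when
+# non-empty.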
+
+
+def generate_warning_codes(option, opt, options_warning):
+ try:
+ # options_warning is a string e.g.: --warning_codes 123,456,789
+ # we need to create a list from it
+ warning_codes = options_warning.split(",")
+ return warning_codes
+ except ValueError:
+ raise optparse.OptionValueError("option %s: invalid warning codes value(s): %r" % (opt, options_warning))
+
+
+class DogwrapOption(optparse.Option):
+ # https://docs.python.org/3.7/library/optparse.html#adding-new-types
+ TYPES = optparse.Option.TYPES + ("warning_codes",)
+ TYPE_CHECKER = copy(optparse.Option.TYPE_CHECKER)
+ TYPE_CHECKER["warning_codes"] = generate_warning_codes
+
+
+def parse_options(raw_args=None):
+ """
+ Parse the raw command line options into an options object and the remaining command string
+ """
+ parser = optparse.OptionParser(
+ usage='%prog -n [event_name] -k [api_key] --submit_mode \
+[ all | errors | warnings] [options] "command". \n\nNote that you need to enclose your command in \
+quotes to prevent python executing as soon as there is a space in your command. \n \nNOTICE: In \
+normal mode, the whole stderr is printed before stdout, in flush_live mode they will be mixed but \
+there is no guarantee that messages sent by the command on both stderr and stdout are printed in \
+the order they were sent.',
+ version="%prog {0}".format(__version__),
+ option_class=DogwrapOption,
+ )
+
+ parser.add_option(
+ "-n",
+ "--name",
+ action="store",
+ type="string",
+ help="the name of the event \
+as it should appear on your Datadog stream",
+ )
+ parser.add_option(
+ "-k",
+ "--api_key",
+ action="store",
+ type="string",
+ help="your DataDog API Key",
+ default=os.environ.get("DD_API_KEY"),
+ )
+ parser.add_option(
+ "-s",
+ "--site",
+ action="store",
+ type="string",
+ default="datadoghq.com",
+ help="The site to send data. Accepts us (datadoghq.com), eu (datadoghq.eu), \
+us3 (us3.datadoghq.com), us5 (us5.datadoghq.com), or ap1 (ap1.datadoghq.com), \
+gov (ddog-gov.com), or custom url. default: us",
+ )
+ parser.add_option(
+ "-m",
+ "--submit_mode",
+ action="store",
+ type="choice",
+ default="errors",
+ choices=["errors", "warnings", "all"],
+ help="[ all | errors | warnings ] if set \
+to error, an event will be sent only of the command exits with a non zero exit status or if it \
+times out. If set to warning, a list of exit codes need to be provided",
+ )
+ parser.add_option(
+ "--warning_codes",
+ action="store",
+ type="warning_codes",
+ dest="warning_codes",
+ help="comma separated list of warning codes, e.g: 127,255",
+ )
+ parser.add_option(
+ "-p",
+ "--priority",
+ action="store",
+ type="choice",
+ choices=["normal", "low"],
+ help="the priority of the event (default: 'normal')",
+ )
+ parser.add_option(
+ "-t",
+ "--timeout",
+ action="store",
+ type="int",
+ default=60 * 60 * 24,
+ help="(in seconds) a timeout after which your command must be aborted. An \
+event will be sent to your DataDog stream (default: 24 hours)",
+ )
+ parser.add_option(
+ "--sigterm_timeout",
+ action="store",
+ type="int",
+ default=60 * 2,
+ help="(in seconds) When your command times out, the \
+process it triggers is sent a SIGTERM. If this sigterm_timeout is reached, it will be sent a \
+SIGKILL signal. (default: 2m)",
+ )
+ parser.add_option(
+ "--sigkill_timeout",
+ action="store",
+ type="int",
+ default=60,
+ help="(in seconds) how long to wait at most after SIGKILL \
+ has been sent (default: 60s)",
+ )
+ parser.add_option(
+ "--proc_poll_interval",
+ action="store",
+ type="float",
+ default=0.5,
+ help="(in seconds). interval at which your command will be polled \
+(default: 500ms)",
+ )
+ parser.add_option(
+ "--notify_success",
+ action="store",
+ type="string",
+ default="",
+ help="a message string and @people directives to send notifications in \
+case of success.",
+ )
+ parser.add_option(
+ "--notify_error",
+ action="store",
+ type="string",
+ default="",
+ help="a message string and @people directives to send notifications in \
+case of error.",
+ )
+ parser.add_option(
+ "--notify_warning",
+ action="store",
+ type="string",
+ default="",
+ help="a message string and @people directives to send notifications in \
+ case of warning.",
+ )
+ parser.add_option(
+ "-b",
+ "--buffer_outs",
+ action="store_true",
+ dest="buffer_outs",
+ default=False,
+ help="displays the stderr and stdout of the command only once it has \
+returned (the command outputs remain buffered in dogwrap meanwhile)",
+ )
+ parser.add_option(
+ "--send_metric",
+ action="store_true",
+ dest="send_metric",
+ default=False,
+ help="sends a metric for event duration",
+ )
+ parser.add_option(
+ "--tags", action="store", type="string", dest="tags", default="", help="comma separated list of tags"
+ )
+
+ options, args = parser.parse_args(args=raw_args)
+
+ if is_p3k():
+ cmd = " ".join(args)
+ else:
+ cmd = b" ".join(args).decode("utf-8")
+
+ return options, cmd
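+
+# Editor's note (illustrative, not upstream code): parse_options() separates
+# dogwrap's own flags from the wrapped command string, e.g.
+#
+#     options, cmd = parse_options(["-n", "nightly-backup", "ls -lah"])
+#     # cmd == "ls -lah", options.name == "nightly-backup"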
+
+
+def main():
+ options, cmd = parse_options()
+
+    # If buffer_outs is set, the outputs are buffered (and therefore not
+    # forwarded to the terminal streams while the command runs); they are
+    # printed in one go once the command has returned
+ returncode, stdout, stderr, duration = execute(
+ cmd,
+ options.timeout,
+ options.sigterm_timeout,
+ options.sigkill_timeout,
+ options.proc_poll_interval,
+ options.buffer_outs,
+ )
+
+ if options.site in ("datadoghq.com", "us"):
+ api_host = "https://api.datadoghq.com"
+ elif options.site in ("datadoghq.eu", "eu"):
+ api_host = "https://api.datadoghq.eu"
+ elif options.site in ("us3.datadoghq.com", "us3"):
+ api_host = "https://api.us3.datadoghq.com"
+ elif options.site in ("us5.datadoghq.com", "us5"):
+ api_host = "https://api.us5.datadoghq.com"
+ elif options.site in ("ap1.datadoghq.com", "ap1"):
+ api_host = "https://api.ap1.datadoghq.com"
+ elif options.site in ("ddog-gov.com", "gov"):
+ api_host = "https://api.ddog-gov.com"
+ else:
+ api_host = options.site
+
+ initialize(api_key=options.api_key, api_host=api_host)
+ host = api._host_name
+
+ warning_codes = None
+
+ if options.warning_codes:
+        # Convert warning codes from strings to ints, since they are compared against the integer return code
+ warning_codes = list(map(int, options.warning_codes))
+
+ if returncode == 0:
+ alert_type = SUCCESS
+ event_priority = "low"
+ event_title = u"[%s] %s succeeded in %.2fs" % (host, options.name, duration)
+ elif returncode != 0 and options.submit_mode == "warnings":
+ if not warning_codes:
+ # the list of warning codes is empty - the option was not specified
+ print("A comma separated list of exit codes need to be provided")
+ sys.exit()
+ elif returncode in warning_codes:
+ alert_type = WARNING
+ event_priority = "normal"
+ event_title = u"[%s] %s failed in %.2fs" % (host, options.name, duration)
+ else:
+ print("Command exited with a different exit code that the one(s) provided")
+ sys.exit()
+ else:
+ alert_type = ERROR
+ event_priority = "normal"
+
+ if returncode is Timeout:
+ event_title = u"[%s] %s timed out after %.2fs" % (host, options.name, duration)
+ returncode = -1
+ else:
+ event_title = u"[%s] %s failed in %.2fs" % (host, options.name, duration)
+
+ notifications = ""
+
+ if alert_type == SUCCESS and options.notify_success:
+ notifications = options.notify_success
+ elif alert_type == ERROR and options.notify_error:
+ notifications = options.notify_error
+ elif alert_type == WARNING and options.notify_warning:
+ notifications = options.notify_warning
+
+ if options.tags:
+ tags = [t.strip() for t in options.tags.split(",")]
+ else:
+ tags = None
+
+ event_body = build_event_body(cmd, returncode, stdout, stderr, notifications)
+
+ event = {
+ "alert_type": alert_type,
+ "aggregation_key": options.name,
+ "host": host,
+ "priority": options.priority or event_priority,
+ "tags": tags,
+ }
+
+ if options.buffer_outs:
+ if is_p3k():
+ stderr = stderr.decode("utf-8")
+ stdout = stdout.decode("utf-8")
+
+ print(stderr.strip(), file=sys.stderr)
+ print(stdout.strip(), file=sys.stdout)
+
+ if options.submit_mode == "all" or returncode != 0:
+ if options.send_metric:
+ event_name_tag = "event_name:{}".format(options.name)
+ if tags:
+ duration_tags = tags + [event_name_tag]
+ else:
+ duration_tags = [event_name_tag]
+ api.Metric.send(metric="dogwrap.duration", points=duration, tags=duration_tags, type="gauge")
+ api.Event.create(title=event_title, text=event_body, **event)
+
+ sys.exit(returncode)
+
+
+if __name__ == "__main__":
+ if sys.argv[0].endswith("dogwrap"):
+ warnings.warn("dogwrap is pending deprecation. Please use dogshellwrap instead.", PendingDeprecationWarning)
+ main()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/__init__.py
new file mode 100644
index 0000000..d6fa527
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/__init__.py
@@ -0,0 +1,4 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datadog.dogstatsd.base import DogStatsd, statsd # noqa
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/base.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/base.py
new file mode 100644
index 0000000..1f58fe5
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/base.py
@@ -0,0 +1,1398 @@
+#!/usr/bin/env python
+
+# Unless explicitly stated otherwise all files in this repository are licensed under
+# the BSD-3-Clause License. This product includes software developed at Datadog
+# (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+"""
+DogStatsd is a Python client for DogStatsd, a Statsd fork for Datadog.
+"""
+# Standard libraries
+from random import random
+import logging
+import os
+import socket
+import errno
+import threading
+import time
+from threading import Lock, RLock
+import weakref
+
+try:
+ import queue
+except ImportError:
+ # pypy has the same module, but capitalized.
+ import Queue as queue # type: ignore[no-redef]
+
+from typing import Optional, List, Text, Union
+
+# Datadog libraries
+from datadog.dogstatsd.context import (
+ TimedContextManagerDecorator,
+ DistributedContextManagerDecorator,
+)
+from datadog.dogstatsd.route import get_default_route
+from datadog.dogstatsd.container import ContainerID
+from datadog.util.compat import is_p3k, text
+from datadog.util.format import normalize_tags
+from datadog.version import __version__
+
+# Logging
+log = logging.getLogger("datadog.dogstatsd")
+
+# Default config
+DEFAULT_HOST = "localhost"
+DEFAULT_PORT = 8125
+
+# Buffering-related values (in seconds)
+DEFAULT_FLUSH_INTERVAL = 0.3
+MIN_FLUSH_INTERVAL = 0.0001
+
+# Tag name of entity_id
+ENTITY_ID_TAG_NAME = "dd.internal.entity_id"
+
+# Env var name of entity_id
+ENTITY_ID_ENV_VAR = "DD_ENTITY_ID"
+
+# Env var to enable/disable sending the container ID field
+ORIGIN_DETECTION_ENABLED = "DD_ORIGIN_DETECTION_ENABLED"
+
+# Default buffer settings based on socket type
+UDP_OPTIMAL_PAYLOAD_LENGTH = 1432
+UDS_OPTIMAL_PAYLOAD_LENGTH = 8192
+
+# Socket options
+MIN_SEND_BUFFER_SIZE = 32 * 1024
+
+# Mapping of each "DD_" prefixed environment variable to a specific tag name
+DD_ENV_TAGS_MAPPING = {
+ ENTITY_ID_ENV_VAR: ENTITY_ID_TAG_NAME,
+ "DD_ENV": "env",
+ "DD_SERVICE": "service",
+ "DD_VERSION": "version",
+}
+
+# Telemetry minimum flush interval in seconds
+DEFAULT_TELEMETRY_MIN_FLUSH_INTERVAL = 10
+
+# Telemetry pre-computed formatting string. Pre-computation
+# increases throughput of composing the result by 2-15% from basic
+# '%'-based formatting with a `join`.
+TELEMETRY_FORMATTING_STR = "\n".join(
+ [
+ "datadog.dogstatsd.client.metrics:%s|c|#%s",
+ "datadog.dogstatsd.client.events:%s|c|#%s",
+ "datadog.dogstatsd.client.service_checks:%s|c|#%s",
+ "datadog.dogstatsd.client.bytes_sent:%s|c|#%s",
+ "datadog.dogstatsd.client.bytes_dropped:%s|c|#%s",
+ "datadog.dogstatsd.client.bytes_dropped_queue:%s|c|#%s",
+ "datadog.dogstatsd.client.bytes_dropped_writer:%s|c|#%s",
+ "datadog.dogstatsd.client.packets_sent:%s|c|#%s",
+ "datadog.dogstatsd.client.packets_dropped:%s|c|#%s",
+ "datadog.dogstatsd.client.packets_dropped_queue:%s|c|#%s",
+ "datadog.dogstatsd.client.packets_dropped_writer:%s|c|#%s",
+ ]
+) + "\n"
+
+Stop = object()
+
+SUPPORTS_FORKING = hasattr(os, "register_at_fork") and not os.environ.get("DD_DOGSTATSD_DISABLE_FORK_SUPPORT", None)
+TRACK_INSTANCES = not os.environ.get("DD_DOGSTATSD_DISABLE_INSTANCE_TRACKING", None)
+
+_instances = weakref.WeakSet() # type: weakref.WeakSet
+
+
+def pre_fork():
+ """Prepare all client instances for a process fork.
+
+ If SUPPORTS_FORKING is true, this will be called automatically before os.fork().
+ """
+ for c in _instances:
+ c.pre_fork()
+
+
+def post_fork():
+ """Restore all client instances after a fork.
+
+ If SUPPORTS_FORKING is true, this will be called automatically after os.fork().
+ """
+ for c in _instances:
+ c.post_fork()
+
+
+if SUPPORTS_FORKING:
+ os.register_at_fork(before=pre_fork, after_in_child=post_fork, after_in_parent=post_fork) # type: ignore
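+
+# Editor's note (illustrative, not upstream code): on interpreters without
+# os.register_at_fork (Python < 3.7), the hooks can be driven manually:
+#
+#     pre_fork()
+#     pid = os.fork()
+#     post_fork()  # run in both the parent and the child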
+
+
+# pylint: disable=useless-object-inheritance,too-many-instance-attributes
+# pylint: disable=too-many-arguments,too-many-locals
+class DogStatsd(object):
+ OK, WARNING, CRITICAL, UNKNOWN = (0, 1, 2, 3)
+
+ def __init__(
+ self,
+ host=DEFAULT_HOST, # type: Text
+ port=DEFAULT_PORT, # type: int
+ max_buffer_size=None, # type: None
+ flush_interval=DEFAULT_FLUSH_INTERVAL, # type: float
+ disable_buffering=True, # type: bool
+ namespace=None, # type: Optional[Text]
+ constant_tags=None, # type: Optional[List[str]]
+ use_ms=False, # type: bool
+ use_default_route=False, # type: bool
+ socket_path=None, # type: Optional[Text]
+ default_sample_rate=1, # type: float
+ disable_telemetry=False, # type: bool
+ telemetry_min_flush_interval=(DEFAULT_TELEMETRY_MIN_FLUSH_INTERVAL), # type: int
+ telemetry_host=None, # type: Text
+ telemetry_port=None, # type: Union[str, int]
+ telemetry_socket_path=None, # type: Text
+ max_buffer_len=0, # type: int
+ container_id=None, # type: Optional[Text]
+ origin_detection_enabled=True, # type: bool
+ socket_timeout=0, # type: Optional[float]
+ telemetry_socket_timeout=0, # type: Optional[float]
+ disable_background_sender=True, # type: bool
+ sender_queue_size=0, # type: int
+ sender_queue_timeout=0, # type: Optional[float]
+ track_instance=True, # type: bool
+ ): # type: (...) -> None
+ """
+ Initialize a DogStatsd object.
+
+ >>> statsd = DogStatsd()
+
+ :envvar DD_AGENT_HOST: the host of the DogStatsd server.
+ If set, it overrides default value.
+ :type DD_AGENT_HOST: string
+
+ :envvar DD_DOGSTATSD_PORT: the port of the DogStatsd server.
+ If set, it overrides default value.
+ :type DD_DOGSTATSD_PORT: integer
+
+ :envvar DATADOG_TAGS: Tags to attach to every metric reported by dogstatsd client.
+ :type DATADOG_TAGS: comma-delimited string
+
+ :envvar DD_ENTITY_ID: Tag to identify the client entity.
+ :type DD_ENTITY_ID: string
+
+ :envvar DD_ENV: the env of the service running the dogstatsd client.
+ If set, it is appended to the constant (global) tags of the statsd client.
+ :type DD_ENV: string
+
+ :envvar DD_SERVICE: the name of the service running the dogstatsd client.
+ If set, it is appended to the constant (global) tags of the statsd client.
+ :type DD_SERVICE: string
+
+ :envvar DD_VERSION: the version of the service running the dogstatsd client.
+ If set, it is appended to the constant (global) tags of the statsd client.
+ :type DD_VERSION: string
+
+ :envvar DD_DOGSTATSD_DISABLE: Disable any statsd metric collection (default False)
+ :type DD_DOGSTATSD_DISABLE: boolean
+
+ :envvar DD_TELEMETRY_HOST: the host for the dogstatsd server we wish to submit
+ telemetry stats to. If set, it overrides default value.
+ :type DD_TELEMETRY_HOST: string
+
+ :envvar DD_TELEMETRY_PORT: the port for the dogstatsd server we wish to submit
+ telemetry stats to. If set, it overrides default value.
+ :type DD_TELEMETRY_PORT: integer
+
+ :envvar DD_ORIGIN_DETECTION_ENABLED: Enable/disable sending the container ID field
+ for origin detection.
+ :type DD_ORIGIN_DETECTION_ENABLED: boolean
+
+ :envvar DD_DOGSTATSD_DISABLE_FORK_SUPPORT: Don't install global fork hooks with os.register_at_fork.
+ Global fork hooks then need to be called manually before and after calling os.fork.
+ :type DD_DOGSTATSD_DISABLE_FORK_SUPPORT: boolean
+
+ :envvar DD_DOGSTATSD_DISABLE_INSTANCE_TRACKING: Don't register instances of this class with global fork hooks.
+ :type DD_DOGSTATSD_DISABLE_INSTANCE_TRACKING: boolean
+
+ :param host: the host of the DogStatsd server.
+ :type host: string
+
+ :param port: the port of the DogStatsd server.
+ :type port: integer
+
+        :param max_buffer_size: Deprecated option, do not use it anymore.
+        :type max_buffer_size: None
+
+        :param flush_interval: Amount of time in seconds that the flush thread will
+        wait before trying to flush the buffered metrics to the server. If set,
+        it overrides the default value.
+        :type flush_interval: float
+
+        :param disable_buffering: If set, metrics are no longer buffered by the client and
+        all data is sent synchronously to the server
+        :type disable_buffering: bool
+
+ :param namespace: Namespace to prefix all metric names
+ :type namespace: string
+
+ :param constant_tags: Tags to attach to all metrics
+ :type constant_tags: list of strings
+
+ :param use_ms: Report timed values in milliseconds instead of seconds (default False)
+ :type use_ms: boolean
+
+ :param use_default_route: Dynamically set the DogStatsd host to the default route
+ (Useful when running the client in a container) (Linux only)
+ :type use_default_route: boolean
+
+ :param socket_path: Communicate with dogstatsd through a UNIX socket instead of
+ UDP. If set, disables UDP transmission (Linux only)
+ :type socket_path: string
+
+ :param default_sample_rate: Sample rate to use by default for all metrics
+ :type default_sample_rate: float
+
+ :param max_buffer_len: Maximum number of bytes to buffer before sending to the server
+        if sending metrics in batch. If not specified, it will be adjusted to an optimal value
+ depending on the connection type.
+ :type max_buffer_len: integer
+
+ :param disable_telemetry: Should client telemetry be disabled
+ :type disable_telemetry: boolean
+
+ :param telemetry_min_flush_interval: Minimum flush interval for telemetry in seconds
+ :type telemetry_min_flush_interval: integer
+
+ :param telemetry_host: the host for the dogstatsd server we wish to submit
+ telemetry stats to. Optional. If telemetry is enabled and this is not specified
+ the default host will be used.
+        :type telemetry_host: string
+
+ :param telemetry_port: the port for the dogstatsd server we wish to submit
+ telemetry stats to. Optional. If telemetry is enabled and this is not specified
+ the default host will be used.
+        :type telemetry_port: integer
+
+ :param telemetry_socket_path: Submit client telemetry to dogstatsd through a UNIX
+ socket instead of UDP. If set, disables UDP transmission (Linux only)
+ :type telemetry_socket_path: string
+
+ :param container_id: Allows passing the container ID, this will be used by the Agent to enrich
+ metrics with container tags.
+ This feature requires Datadog Agent version >=6.35.0 && <7.0.0 or Agent versions >=7.35.0.
+ When configured, the provided container ID is prioritized over the container ID discovered
+ via Origin Detection. When DD_ENTITY_ID is set, this value is ignored.
+ Default: None.
+ :type container_id: string
+
+ :param origin_detection_enabled: Enable/disable the client origin detection.
+ This feature requires Datadog Agent version >=6.35.0 && <7.0.0 or Agent versions >=7.35.0.
+ When enabled, the client tries to discover its container ID and sends it to the Agent
+ to enrich the metrics with container tags.
+            Origin detection can be disabled by configuring the environment variable DD_ORIGIN_DETECTION_ENABLED=false
+ The client tries to read the container ID by parsing the file /proc/self/cgroup.
+ This is not supported on Windows.
+ The client prioritizes the value passed via DD_ENTITY_ID (if set) over the container ID.
+ Default: True.
+ More on this: https://docs.datadoghq.com/developers/dogstatsd/?tab=kubernetes#origin-detection-over-udp
+ :type origin_detection_enabled: boolean
+
+ :param socket_timeout: Set timeout for socket operations, in seconds. Optional.
+            If set to zero, never wait if the operation can not be completed immediately. If set to None, wait forever.
+ This option does not affect hostname resolution when using UDP.
+ :type socket_timeout: float
+
+ :param telemetry_socket_timeout: Set timeout for the telemetry socket operations. Optional.
+ Effective only if either telemetry_host or telemetry_socket_path are set.
+            If set to zero, never wait if the operation can not be completed immediately. If set to None, wait forever.
+ This option does not affect hostname resolution when using UDP.
+ :type telemetry_socket_timeout: float
+
+ :param disable_background_sender: Use a background thread to communicate with the dogstatsd server. Optional.
+ When enabled, a background thread will be used to send metric payloads to the Agent.
+ Applications should call stop() before exiting to make sure all pending payloads are sent.
+ Default: True.
+ :type disable_background_sender: boolean
+
+ :param sender_queue_size: Set the maximum number of packets to queue for the sender. Optional
+            How many packets to queue before blocking or dropping the packet if the packet queue is already full.
+ Default: 0 (unlimited).
+ :type sender_queue_size: integer
+
+ :param sender_queue_timeout: Set timeout for packet queue operations, in seconds. Optional.
+            How long the application thread is willing to wait for the queue to clear up before dropping the metric packet.
+ If set to None, wait forever.
+ If set to zero drop the packet immediately if the queue is full.
+ Default: 0 (no wait)
+ :type sender_queue_timeout: float
+
+ :param track_instance: Keep track of this instance and automatically handle cleanup when os.fork() is called,
+ if supported.
+ Default: True.
+ :type track_instance: boolean
+ """
+
+ self._socket_lock = Lock()
+
+ # Check for deprecated option
+ if max_buffer_size is not None:
+ log.warning("The parameter max_buffer_size is now deprecated and is not used anymore")
+
+ # Check host and port env vars
+ agent_host = os.environ.get("DD_AGENT_HOST")
+ if agent_host and host == DEFAULT_HOST:
+ host = agent_host
+
+ dogstatsd_port = os.environ.get("DD_DOGSTATSD_PORT")
+ if dogstatsd_port and port == DEFAULT_PORT:
+ try:
+ port = int(dogstatsd_port)
+ except ValueError:
+ log.warning(
+ "Port number provided in DD_DOGSTATSD_PORT env var is not an integer: \
+ %s, using %s as port number",
+ dogstatsd_port,
+ port,
+ )
+
+ # Assuming environment variables always override
+ telemetry_host = os.environ.get("DD_TELEMETRY_HOST", telemetry_host)
+ telemetry_port = os.environ.get("DD_TELEMETRY_PORT", telemetry_port) or port
+
+ # Check enabled
+ if os.environ.get("DD_DOGSTATSD_DISABLE") not in {"True", "true", "yes", "1"}:
+ self._enabled = True
+ else:
+ self._enabled = False
+
+ # Connection
+ self._max_buffer_len = max_buffer_len
+ self.socket_timeout = socket_timeout
+ if socket_path is not None:
+ self.socket_path = socket_path # type: Optional[text]
+ self.host = None
+ self.port = None
+ else:
+ self.socket_path = None
+ self.host = self.resolve_host(host, use_default_route)
+ self.port = int(port)
+
+ self.telemetry_socket_path = telemetry_socket_path
+ self.telemetry_host = None
+ self.telemetry_port = None
+ self.telemetry_socket_timeout = telemetry_socket_timeout
+ if not telemetry_socket_path and telemetry_host:
+ self.telemetry_socket_path = None
+ self.telemetry_host = self.resolve_host(telemetry_host, use_default_route)
+ self.telemetry_port = int(telemetry_port)
+
+ # Socket
+ self.socket = None
+ self.telemetry_socket = None
+ self.encoding = "utf-8"
+
+ # Options
+ env_tags = [tag for tag in os.environ.get("DATADOG_TAGS", "").split(",") if tag]
+ # Inject values of DD_* environment variables as global tags.
+ has_entity_id = False
+ for var, tag_name in DD_ENV_TAGS_MAPPING.items():
+ value = os.environ.get(var, "")
+ if value:
+ env_tags.append("{name}:{value}".format(name=tag_name, value=value))
+ if var == ENTITY_ID_ENV_VAR:
+ has_entity_id = True
+ if constant_tags is None:
+ constant_tags = []
+ self.constant_tags = constant_tags + env_tags
+ if namespace is not None:
+ namespace = text(namespace)
+ self.namespace = namespace
+ self.use_ms = use_ms
+ self.default_sample_rate = default_sample_rate
+
+ # Origin detection
+ self._container_id = None
+ if not has_entity_id:
+ origin_detection_enabled = self._is_origin_detection_enabled(
+ container_id, origin_detection_enabled, has_entity_id
+ )
+ self._set_container_id(container_id, origin_detection_enabled)
+
+ # init telemetry version
+ self._client_tags = [
+ "client:py",
+ "client_version:{}".format(__version__),
+ ]
+ self._reset_telemetry()
+ self._telemetry_flush_interval = telemetry_min_flush_interval
+ self._telemetry = not disable_telemetry
+ self._last_flush_time = time.time()
+
+ self._current_buffer_total_size = 0
+ self._buffer = [] # type: List[Text]
+ self._buffer_lock = RLock()
+
+ self._reset_buffer()
+
+ # This lock is used for all cases where client configuration is being changed: buffering, sender mode.
+ self._config_lock = RLock()
+
+ # If buffering is disabled, we bypass the buffer function.
+ self._send = self._send_to_buffer
+ self._disable_buffering = disable_buffering
+ if self._disable_buffering:
+ self._send = self._send_to_server
+ log.debug("Statsd buffering is disabled")
+
+ # Indicates if the process is about to fork, so we shouldn't start any new threads yet.
+ self._forking = False
+
+ # Start the flush thread if buffering is enabled and the interval is above
+ # a reasonable range. This both prevents thrashing and allow us to use "0.0"
+ # as a value for disabling the automatic flush timer as well.
+ self._flush_interval = flush_interval
+ self._flush_thread_stop = threading.Event()
+ self._flush_thread = None
+ self._start_flush_thread(self._flush_interval)
+
+ self._queue = None
+ self._sender_thread = None
+ self._sender_enabled = False
+
+ if not disable_background_sender:
+ self.enable_background_sender(sender_queue_size, sender_queue_timeout)
+
+ if TRACK_INSTANCES and track_instance:
+ _instances.add(self)
+
+ @property
+ def socket_path(self):
+ return self._socket_path
+
+ @socket_path.setter
+ def socket_path(self, path):
+ with self._socket_lock:
+ self._socket_path = path
+ if path is None:
+ self._transport = "udp"
+ self._max_payload_size = self._max_buffer_len or UDP_OPTIMAL_PAYLOAD_LENGTH
+ else:
+ self._transport = "uds"
+ self._max_payload_size = self._max_buffer_len or UDS_OPTIMAL_PAYLOAD_LENGTH
+
+ def enable_background_sender(self, sender_queue_size=0, sender_queue_timeout=0):
+ """
+ Use a background thread to communicate with the dogstatsd server.
+ When enabled, a background thread will be used to send metric payloads to the Agent.
+
+ Applications should call stop() before exiting to make sure all pending payloads are sent.
+
+ Compatible with os.fork() starting with Python 3.7. On earlier versions, compatible if applications
+ arrange to call pre_fork() and post_fork() module functions around calls to os.fork().
+
+ :param sender_queue_size: Set the maximum number of packets to queue for the sender.
+ How many packets to queue before blocking or dropping the packet if the packet queue is already full.
+ Default: 0 (unlimited).
+ :type sender_queue_size: integer, optional
+ :param sender_queue_timeout: Set timeout for packet queue operations, in seconds.
+        How long the application thread is willing to wait for the queue to clear up before dropping the metric packet.
+ If set to None, wait forever. If set to zero drop the packet immediately if the queue is full.
+ Default: 0 (no wait).
+ :type sender_queue_timeout: float, optional
+ """
+
+ with self._config_lock:
+ self._sender_enabled = True
+ self._sender_queue_size = sender_queue_size
+ if sender_queue_timeout is None:
+ self._queue_blocking = True
+ self._queue_timeout = None
+ else:
+ self._queue_blocking = sender_queue_timeout > 0
+ self._queue_timeout = max(0, sender_queue_timeout)
+
+ self._start_sender_thread()
+
+ def disable_background_sender(self):
+ """Disable background sender mode.
+
+ This call will block until all previously queued payloads are sent.
+ """
+ with self._config_lock:
+ self._sender_enabled = False
+ self._stop_sender_thread()
+
+ def disable_telemetry(self):
+ self._telemetry = False
+
+ def enable_telemetry(self):
+ self._telemetry = True
+
+ # Note: Invocations of this method should be thread-safe
+ def _start_flush_thread(self, flush_interval):
+ if self._disable_buffering or self._flush_interval <= MIN_FLUSH_INTERVAL:
+ log.debug("Statsd periodic buffer flush is disabled")
+ return
+
+ if self._forking:
+ return
+
+ if self._flush_thread is not None:
+ return
+
+ def _flush_thread_loop(self, flush_interval):
+ while not self._flush_thread_stop.is_set():
+ time.sleep(flush_interval)
+ self.flush()
+
+ self._flush_thread = threading.Thread(
+ name="{}_flush_thread".format(self.__class__.__name__),
+ target=_flush_thread_loop,
+ args=(self, flush_interval,),
+ )
+ self._flush_thread.daemon = True
+ self._flush_thread.start()
+
+ log.debug(
+ "Statsd flush thread registered with period of %s",
+ self._flush_interval,
+ )
+
+ # Note: Invocations of this method should be thread-safe
+ def _stop_flush_thread(self):
+ if not self._flush_thread:
+ return
+
+ try:
+ self.flush()
+ finally:
+ pass
+
+ self._flush_thread_stop.set()
+
+ self._flush_thread.join()
+ self._flush_thread = None
+
+ self._flush_thread_stop.clear()
+
+ def _dedicated_telemetry_destination(self):
+ return bool(self.telemetry_socket_path or self.telemetry_host)
+
+ # Context manager helper
+ def __enter__(self):
+ self.open_buffer()
+ return self
+
+ # Context manager helper
+ def __exit__(self, exc_type, value, traceback):
+ self.close_buffer()
+
+ @property
+ def disable_buffering(self):
+ with self._config_lock:
+ return self._disable_buffering
+
+ @disable_buffering.setter
+ def disable_buffering(self, is_disabled):
+ with self._config_lock:
+ # If the toggle didn't change anything, this method is a noop
+ if self._disable_buffering == is_disabled:
+ return
+
+ self._disable_buffering = is_disabled
+
+ # If buffering has been disabled, flush and kill the background thread
+ # otherwise start up the flushing thread and enable the buffering.
+ if is_disabled:
+ self._send = self._send_to_server
+ self._stop_flush_thread()
+ log.debug("Statsd buffering is disabled")
+ else:
+ self._send = self._send_to_buffer
+ self._start_flush_thread(self._flush_interval)
+
+ @staticmethod
+ def resolve_host(host, use_default_route):
+ """
+ Resolve the DogStatsd host.
+
+ :param host: host
+ :type host: string
+ :param use_default_route: Use the system default route as host (overrides `host` parameter)
+ :type use_default_route: bool
+ """
+ if not use_default_route:
+ return host
+
+ return get_default_route()
+
+ def get_socket(self, telemetry=False):
+ """
+ Return a connected socket.
+
+ Note: connect the socket before assigning it to the class instance to
+ avoid bad thread race conditions.
+ """
+ with self._socket_lock:
+ if telemetry and self._dedicated_telemetry_destination():
+ if not self.telemetry_socket:
+ if self.telemetry_socket_path is not None:
+ self.telemetry_socket = self._get_uds_socket(
+ self.telemetry_socket_path,
+ self.telemetry_socket_timeout,
+ )
+ else:
+ self.telemetry_socket = self._get_udp_socket(
+ self.telemetry_host,
+ self.telemetry_port,
+ self.telemetry_socket_timeout,
+ )
+
+ return self.telemetry_socket
+
+ if not self.socket:
+ if self.socket_path is not None:
+ self.socket = self._get_uds_socket(self.socket_path, self.socket_timeout)
+ else:
+ self.socket = self._get_udp_socket(
+ self.host,
+ self.port,
+ self.socket_timeout,
+ )
+
+ return self.socket
+
+ def set_socket_timeout(self, timeout):
+ """
+ Set timeout for socket operations, in seconds.
+
+        If set to zero, never wait if the operation can not be completed immediately. If set to None, wait forever.
+ This option does not affect hostname resolution when using UDP.
+ """
+ with self._socket_lock:
+ self.socket_timeout = timeout
+ if self.socket:
+ self.socket.settimeout(timeout)
+
+ @classmethod
+ def _ensure_min_send_buffer_size(cls, sock, min_size=MIN_SEND_BUFFER_SIZE):
+        # Increase the send buffer size where needed (e.g. macOS has 4k send
+        # buffers, which is half of the max packet size that the client will send).
+ if os.name == 'posix':
+ try:
+                send_buff_size = sock.getsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF)
+                if send_buff_size <= min_size:
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, min_size)
+ log.debug("Socket send buffer increased to %dkb", min_size / 1024)
+ finally:
+ pass
+
+ @classmethod
+ def _get_uds_socket(cls, socket_path, timeout):
+ sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
+ sock.settimeout(timeout)
+ cls._ensure_min_send_buffer_size(sock)
+ sock.connect(socket_path)
+ return sock
+
+ @classmethod
+ def _get_udp_socket(cls, host, port, timeout):
+ log.debug("Connecting to %s:%s", host, port)
+ addrinfo = socket.getaddrinfo(host, port, 0, socket.SOCK_DGRAM)
+        # Override gai.conf order for backwards compatibility: prefer
+ # v4, so that a v4-only service on hosts with both addresses
+ # still works.
+ addrinfo.sort(key=lambda v: v[0] == socket.AF_INET, reverse=True)
+ lastaddr = len(addrinfo) - 1
+ for i, (af, ty, proto, _, addr) in enumerate(addrinfo):
+ sock = None
+ try:
+ sock = socket.socket(af, ty, proto)
+ sock.settimeout(timeout)
+ cls._ensure_min_send_buffer_size(sock)
+ sock.connect(addr)
+ log.debug("Connected to: %s", addr)
+ return sock
+ except Exception as e:
+ if sock is not None:
+ sock.close()
+ log.debug("Failed to connect to %s: %s", addr, e)
+ if i < lastaddr:
+ continue
+ raise e
+ else:
+ raise ValueError("getaddrinfo returned no addresses to connect to")
+
+ def open_buffer(self, max_buffer_size=None):
+ """
+ Open a buffer to send a batch of metrics.
+
+ To take advantage of automatic flushing, you should use the context manager instead
+
+ >>> with DogStatsd() as batch:
+ >>> batch.gauge("users.online", 123)
+ >>> batch.gauge("active.connections", 1001)
+
+ Note: This method must be called before close_buffer() matching invocation.
+ """
+
+ self._config_lock.acquire()
+
+ # XXX Remove if `disable_buffering` default is changed to False
+ self._send = self._send_to_buffer
+
+ if max_buffer_size is not None:
+ log.warning("The parameter max_buffer_size is now deprecated and is not used anymore")
+
+ self._reset_buffer()
+
+ def close_buffer(self):
+ """
+ Flush the buffer and switch back to single metric packets.
+
+ Note: This method must be called after a matching open_buffer()
+ invocation.
+ """
+ try:
+ self.flush()
+ finally:
+ # XXX Remove if `disable_buffering` default is changed to False
+ if self._disable_buffering:
+ self._send = self._send_to_server
+
+ self._config_lock.release()
+
+ def _reset_buffer(self):
+ with self._buffer_lock:
+ self._current_buffer_total_size = 0
+ self._buffer = []
+
+ def flush(self):
+ """
+ Flush the metrics buffer by sending the data to the server.
+ """
+ with self._buffer_lock:
+ # Only send packets if there are packets to send
+ if self._buffer:
+ self._send_to_server("\n".join(self._buffer))
+ self._reset_buffer()
+
+ def gauge(
+ self,
+ metric, # type: Text
+ value, # type: float
+ tags=None, # type: Optional[List[str]]
+ sample_rate=None, # type: Optional[float]
+    ):  # type: (...) -> None
+ """
+ Record the value of a gauge, optionally setting a list of tags and a
+ sample rate.
+
+ >>> statsd.gauge("users.online", 123)
+ >>> statsd.gauge("active.connections", 1001, tags=["protocol:http"])
+ """
+ return self._report(metric, "g", value, tags, sample_rate)
+
+ def increment(
+ self,
+ metric, # type: Text
+ value=1, # type: float
+ tags=None, # type: Optional[List[str]]
+ sample_rate=None, # type: Optional[float]
+ ): # type: (...) -> None
+ """
+ Increment a counter, optionally setting a value, tags and a sample
+ rate.
+
+ >>> statsd.increment("page.views")
+ >>> statsd.increment("files.transferred", 124)
+ """
+ self._report(metric, "c", value, tags, sample_rate)
+
+ def decrement(
+ self,
+ metric, # type: Text
+ value=1, # type: float
+ tags=None, # type: Optional[List[str]]
+ sample_rate=None, # type: Optional[float]
+    ):  # type: (...) -> None
+ """
+ Decrement a counter, optionally setting a value, tags and a sample
+ rate.
+
+ >>> statsd.decrement("files.remaining")
+ >>> statsd.decrement("active.connections", 2)
+ """
+ metric_value = -value if value else value
+ self._report(metric, "c", metric_value, tags, sample_rate)
+
+ def histogram(
+ self,
+ metric, # type: Text
+ value, # type: float
+ tags=None, # type: Optional[List[str]]
+ sample_rate=None, # type: Optional[float]
+    ):  # type: (...) -> None
+ """
+ Sample a histogram value, optionally setting tags and a sample rate.
+
+ >>> statsd.histogram("uploaded.file.size", 1445)
+ >>> statsd.histogram("album.photo.count", 26, tags=["gender:female"])
+ """
+ self._report(metric, "h", value, tags, sample_rate)
+
+ def distribution(
+ self,
+ metric, # type: Text
+ value, # type: float
+ tags=None, # type: Optional[List[str]]
+ sample_rate=None, # type: Optional[float]
+    ):  # type: (...) -> None
+ """
+ Send a global distribution value, optionally setting tags and a sample rate.
+
+ >>> statsd.distribution("uploaded.file.size", 1445)
+ >>> statsd.distribution("album.photo.count", 26, tags=["gender:female"])
+ """
+ self._report(metric, "d", value, tags, sample_rate)
+
+ def timing(
+ self,
+ metric, # type: Text
+ value, # type: float
+ tags=None, # type: Optional[List[str]]
+ sample_rate=None, # type: Optional[float]
+    ):  # type: (...) -> None
+ """
+ Record a timing, optionally setting tags and a sample rate.
+
+ >>> statsd.timing("query.response.time", 1234)
+ """
+ self._report(metric, "ms", value, tags, sample_rate)
+
+ def timed(self, metric=None, tags=None, sample_rate=None, use_ms=None):
+ """
+ A decorator or context manager that will measure the distribution of a
+ function's/context's run time. Optionally specify a list of tags or a
+ sample rate. If the metric is not defined as a decorator, the module
+ name and function name will be used. The metric is required as a context
+ manager.
+ ::
+
+ @statsd.timed("user.query.time", sample_rate=0.5)
+ def get_user(user_id):
+ # Do what you need to ...
+ pass
+
+ # Is equivalent to ...
+ with statsd.timed("user.query.time", sample_rate=0.5):
+ # Do what you need to ...
+ pass
+
+ # Is equivalent to ...
+ start = time.time()
+ try:
+ get_user(user_id)
+ finally:
+ statsd.timing("user.query.time", time.time() - start)
+ """
+ return TimedContextManagerDecorator(self, metric, tags, sample_rate, use_ms)
+
+ def distributed(self, metric=None, tags=None, sample_rate=None, use_ms=None):
+ """
+ A decorator or context manager that will measure the distribution of a
+ function's/context's run time using custom metric distribution.
+ Optionally specify a list of tags or a sample rate. If the metric is not
+ defined as a decorator, the module name and function name will be used.
+ The metric is required as a context manager.
+ ::
+
+ @statsd.distributed("user.query.time", sample_rate=0.5)
+ def get_user(user_id):
+ # Do what you need to ...
+ pass
+
+ # Is equivalent to ...
+ with statsd.distributed("user.query.time", sample_rate=0.5):
+ # Do what you need to ...
+ pass
+
+ # Is equivalent to ...
+ start = time.time()
+ try:
+ get_user(user_id)
+ finally:
+ statsd.distribution("user.query.time", time.time() - start)
+ """
+ return DistributedContextManagerDecorator(self, metric, tags, sample_rate, use_ms)
+
+ def set(self, metric, value, tags=None, sample_rate=None):
+ """
+ Sample a set value.
+
+ >>> statsd.set("visitors.uniques", 999)
+ """
+ self._report(metric, "s", value, tags, sample_rate)
+
+ def close_socket(self):
+ """
+ Closes connected socket if connected.
+ """
+ with self._socket_lock:
+ if self.socket:
+ try:
+ self.socket.close()
+ except OSError as e:
+ log.error("Unexpected error: %s", str(e))
+ self.socket = None
+
+ if self.telemetry_socket:
+ try:
+ self.telemetry_socket.close()
+ except OSError as e:
+ log.error("Unexpected error: %s", str(e))
+ self.telemetry_socket = None
+
+ def _serialize_metric(self, metric, metric_type, value, tags, sample_rate=1):
+ # Create/format the metric packet
+ return "%s%s:%s|%s%s%s%s" % (
+ (self.namespace + ".") if self.namespace else "",
+ metric,
+ value,
+ metric_type,
+ ("|@" + text(sample_rate)) if sample_rate != 1 else "",
+ ("|#" + ",".join(normalize_tags(tags))) if tags else "",
+ ("|c:" + self._container_id if self._container_id else "")
+ )
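+
+    # Editor's note (illustrative, not upstream code): packets follow
+    # <namespace>.<metric>:<value>|<type>|@<sample_rate>|#<tags>, so with
+    # namespace "app" and no container ID:
+    #
+    #     _serialize_metric("page.views", "c", 1, ["env:prod"], 0.5)
+    #     # -> "app.page.views:1|c|@0.5|#env:prod"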
+
+ def _report(self, metric, metric_type, value, tags, sample_rate):
+ """
+ Create a metric packet and send it.
+
+ More information about the packets' format: http://docs.datadoghq.com/guides/dogstatsd/
+ """
+ if value is None:
+ return
+
+ if self._enabled is not True:
+ return
+
+ if self._telemetry:
+ self.metrics_count += 1
+
+ if sample_rate is None:
+ sample_rate = self.default_sample_rate
+
+ if sample_rate != 1 and random() > sample_rate:
+ return
+
+ # Resolve the full tag list
+ tags = self._add_constant_tags(tags)
+ payload = self._serialize_metric(metric, metric_type, value, tags, sample_rate)
+
+ # Send it
+ self._send(payload)
+
+ def _reset_telemetry(self):
+ self.metrics_count = 0
+ self.events_count = 0
+ self.service_checks_count = 0
+ self.bytes_sent = 0
+ self.bytes_dropped_queue = 0
+ self.bytes_dropped_writer = 0
+ self.packets_sent = 0
+ self.packets_dropped_queue = 0
+ self.packets_dropped_writer = 0
+ self._last_flush_time = time.time()
+
+ # Aliases for backwards compatibility.
+ @property
+ def packets_dropped(self):
+ return self.packets_dropped_queue + self.packets_dropped_writer
+
+ @property
+ def bytes_dropped(self):
+ return self.bytes_dropped_queue + self.bytes_dropped_writer
+
+ def _flush_telemetry(self):
+ tags = self._client_tags[:]
+ tags.append("client_transport:{}".format(self._transport))
+ tags.extend(self.constant_tags)
+ telemetry_tags = ",".join(tags)
+
+ return TELEMETRY_FORMATTING_STR % (
+ self.metrics_count,
+ telemetry_tags,
+ self.events_count,
+ telemetry_tags,
+ self.service_checks_count,
+ telemetry_tags,
+ self.bytes_sent,
+ telemetry_tags,
+ self.bytes_dropped_queue + self.bytes_dropped_writer,
+ telemetry_tags,
+ self.bytes_dropped_queue,
+ telemetry_tags,
+ self.bytes_dropped_writer,
+ telemetry_tags,
+ self.packets_sent,
+ telemetry_tags,
+ self.packets_dropped_queue + self.packets_dropped_writer,
+ telemetry_tags,
+ self.packets_dropped_queue,
+ telemetry_tags,
+ self.packets_dropped_writer,
+ telemetry_tags,
+ )
+
+ def _is_telemetry_flush_time(self):
+ return self._telemetry and \
+ self._last_flush_time + self._telemetry_flush_interval < time.time()
+
+ def _send_to_server(self, packet):
+ # Skip the lock if the queue is None. There is no race with enable_background_sender.
+ if self._queue is not None:
+ # Prevent a race with disable_background_sender.
+ with self._buffer_lock:
+ if self._queue is not None:
+ try:
+ self._queue.put(packet + '\n', self._queue_blocking, self._queue_timeout)
+ except queue.Full:
+ self.packets_dropped_queue += 1
+ self.bytes_dropped_queue += 1
+ return
+
+ self._xmit_packet_with_telemetry(packet + '\n')
+
+ def _xmit_packet_with_telemetry(self, packet):
+ self._xmit_packet(packet, False)
+
+ if self._is_telemetry_flush_time():
+ telemetry = self._flush_telemetry()
+ if self._xmit_packet(telemetry, True):
+ self._reset_telemetry()
+ self.packets_sent += 1
+ self.bytes_sent += len(telemetry)
+ else:
+ # Telemetry packet has been dropped, keep telemetry data for the next flush
+ self._last_flush_time = time.time()
+ self.bytes_dropped_writer += len(telemetry)
+ self.packets_dropped_writer += 1
+
+ def _xmit_packet(self, packet, is_telemetry):
+ try:
+ if is_telemetry and self._dedicated_telemetry_destination():
+ mysocket = self.telemetry_socket or self.get_socket(telemetry=True)
+ else:
+ # If set, use socket directly
+ mysocket = self.socket or self.get_socket()
+
+ mysocket.send(packet.encode(self.encoding))
+
+ if not is_telemetry and self._telemetry:
+ self.packets_sent += 1
+ self.bytes_sent += len(packet)
+
+ return True
+ except socket.timeout:
+ # dogstatsd is overflowing, drop the packets (mimics the UDP behaviour)
+ pass
+ except (socket.herror, socket.gaierror) as socket_err:
+ log.warning(
+ "Error submitting packet: %s, dropping the packet and closing the socket",
+ socket_err,
+ )
+ self.close_socket()
+ except socket.error as socket_err:
+ if socket_err.errno == errno.EAGAIN:
+ log.debug("Socket send would block: %s, dropping the packet", socket_err)
+ elif socket_err.errno == errno.ENOBUFS:
+ log.debug("Socket buffer full: %s, dropping the packet", socket_err)
+ elif socket_err.errno == errno.EMSGSIZE:
+ log.debug(
+ "Packet size too big (size: %d): %s, dropping the packet",
+ len(packet.encode(self.encoding)),
+ socket_err)
+ else:
+ log.warning(
+ "Error submitting packet: %s, dropping the packet and closing the socket",
+ socket_err,
+ )
+ self.close_socket()
+ except Exception as exc:
+ print("Unexpected error: %s", exc)
+ log.error("Unexpected error: %s", str(exc))
+
+ if not is_telemetry and self._telemetry:
+ self.bytes_dropped_writer += len(packet)
+ self.packets_dropped_writer += 1
+
+ return False
+
+ def _send_to_buffer(self, packet):
+ with self._buffer_lock:
+ if self._should_flush(len(packet)):
+ self.flush()
+
+ self._buffer.append(packet)
+ # Update the current buffer length, including line break to anticipate
+ # the final packet size
+ self._current_buffer_total_size += len(packet) + 1
+
+ def _should_flush(self, length_to_be_added):
+ if self._current_buffer_total_size + length_to_be_added + 1 > self._max_payload_size:
+ return True
+ return False
+
+ @staticmethod
+ def _escape_event_content(string):
+ return string.replace("\n", "\\n")
+
+ @staticmethod
+ def _escape_service_check_message(string):
+ return string.replace("\n", "\\n").replace("m:", "m\\:")
+
+ def event(
+ self,
+ title,
+ message,
+ alert_type=None,
+ aggregation_key=None,
+ source_type_name=None,
+ date_happened=None,
+ priority=None,
+ tags=None,
+ hostname=None,
+ ):
+ """
+ Send an event. Attributes are the same as the Event API.
+ http://docs.datadoghq.com/api/
+
+ >>> statsd.event("Man down!", "This server needs assistance.")
+ >>> statsd.event("Web server restart", "The web server is up", alert_type="success") # NOQA
+ """
+ title = DogStatsd._escape_event_content(title)
+ message = DogStatsd._escape_event_content(message)
+
+ # pylint: disable=undefined-variable
+ if not is_p3k():
+ if not isinstance(title, unicode): # noqa: F821
+ title = unicode(DogStatsd._escape_event_content(title), 'utf8') # noqa: F821
+ if not isinstance(message, unicode): # noqa: F821
+ message = unicode(DogStatsd._escape_event_content(message), 'utf8') # noqa: F821
+
+ # Append all client level tags to every event
+ tags = self._add_constant_tags(tags)
+
+ string = u"_e{{{},{}}}:{}|{}".format(
+ len(title.encode('utf8', 'replace')),
+ len(message.encode('utf8', 'replace')),
+ title,
+ message,
+ )
+
+ if date_happened:
+ string = "%s|d:%d" % (string, date_happened)
+ if hostname:
+ string = "%s|h:%s" % (string, hostname)
+ if aggregation_key:
+ string = "%s|k:%s" % (string, aggregation_key)
+ if priority:
+ string = "%s|p:%s" % (string, priority)
+ if source_type_name:
+ string = "%s|s:%s" % (string, source_type_name)
+ if alert_type:
+ string = "%s|t:%s" % (string, alert_type)
+ if tags:
+ string = "%s|#%s" % (string, ",".join(tags))
+ if self._container_id:
+ string = "%s|c:%s" % (string, self._container_id)
+
+ if len(string) > 8 * 1024:
+ raise ValueError(
+ u'Event "{0}" payload is too big (>=8KB). Event discarded'.format(
+ title
+ )
+ )
+
+ if self._telemetry:
+ self.events_count += 1
+
+ self._send(string)
+
+ def service_check(
+ self,
+ check_name,
+ status,
+ tags=None,
+ timestamp=None,
+ hostname=None,
+ message=None,
+ ):
+ """
+ Send a service check run.
+
+ >>> statsd.service_check("my_service.check_name", DogStatsd.WARNING)
+ """
+ message = DogStatsd._escape_service_check_message(message) if message is not None else ""
+
+ string = u"_sc|{0}|{1}".format(check_name, status)
+
+ # Append all client level tags to every status check
+ tags = self._add_constant_tags(tags)
+
+ if timestamp:
+ string = u"{0}|d:{1}".format(string, timestamp)
+ if hostname:
+ string = u"{0}|h:{1}".format(string, hostname)
+ if tags:
+ string = u"{0}|#{1}".format(string, ",".join(tags))
+ if message:
+ string = u"{0}|m:{1}".format(string, message)
+ if self._container_id:
+ string = u"{0}|c:{1}".format(string, self._container_id)
+
+ if self._telemetry:
+ self.service_checks_count += 1
+
+ self._send(string)
+
+ def _add_constant_tags(self, tags):
+ if self.constant_tags:
+ if tags:
+ return tags + self.constant_tags
+
+ return self.constant_tags
+ return tags
+
+ def _is_origin_detection_enabled(self, container_id, origin_detection_enabled, has_entity_id):
+ """
+        Returns whether the client should fill the container field automatically.
+        If DD_ENTITY_ID is set, we don't send the container ID: dd.internal.entity_id
+        takes precedence over the container field for backward compatibility.
+        If a user-defined container ID is provided, detection is skipped and that
+        ID is used as-is.
+        Otherwise, we try to fill the container field automatically, unless
+        DD_ORIGIN_DETECTION_ENABLED is explicitly set to false.
+ """
+ if not origin_detection_enabled or has_entity_id or container_id is not None:
+ # origin detection is explicitly disabled
+ # or DD_ENTITY_ID was found
+ # or a user-defined container ID was provided
+ return False
+ value = os.environ.get(ORIGIN_DETECTION_ENABLED, "")
+ return value.lower() not in {"no", "false", "0", "n", "off"}
+
+ def _set_container_id(self, container_id, origin_detection_enabled):
+ """
+ Initializes the container ID.
+ It can either be provided by the user or read from cgroups.
+ """
+ if container_id:
+ self._container_id = container_id
+ return
+ if origin_detection_enabled:
+ try:
+ reader = ContainerID()
+ self._container_id = reader.container_id
+ except Exception as e:
+ log.debug("Couldn't get container ID: %s", str(e))
+ self._container_id = None
+
+ def _start_sender_thread(self):
+ if not self._sender_enabled or self._forking:
+ return
+
+ if self._queue is not None:
+ return
+
+ self._queue = queue.Queue(self._sender_queue_size)
+
+ log.debug("Starting background sender thread")
+ self._sender_thread = threading.Thread(
+ name="{}_sender_thread".format(self.__class__.__name__),
+ target=self._sender_main_loop,
+ args=(self._queue,)
+ )
+ self._sender_thread.daemon = True
+ self._sender_thread.start()
+
+ def _stop_sender_thread(self):
+ # Lock ensures that nothing gets added to the queue after we disable it.
+ with self._buffer_lock:
+ if not self._queue:
+ return
+ self._queue.put(Stop)
+ self._queue = None
+
+ self._sender_thread.join()
+ self._sender_thread = None
+
+ def _sender_main_loop(self, queue):
+ while True:
+ item = queue.get()
+ if item is Stop:
+ queue.task_done()
+ return
+ self._xmit_packet_with_telemetry(item)
+ queue.task_done()
+
+ def wait_for_pending(self):
+ """
+ Flush the buffer and wait for all queued payloads to be written to the server.
+ """
+
+ self.flush()
+
+ # Avoid race with disable_background_sender. We don't need a
+ # lock, just copy the value so it doesn't change between the
+ # check and join later.
+ queue = self._queue
+
+ if queue is not None:
+ queue.join()
+
+ def pre_fork(self):
+ """Prepare client for a process fork.
+
+        Flush any pending payloads, stop all background threads and
+        close the connection.
+
+        Once the function returns, the client should not be used
+        until post_fork() is called.
+ """
+ log.debug("[%d] pre_fork for %s", os.getpid(), self)
+
+ self._forking = True
+
+ with self._config_lock:
+ self._stop_flush_thread()
+ self._stop_sender_thread()
+ self.close_socket()
+
+ def post_fork(self):
+ """Restore the client state after a fork."""
+
+ log.debug("[%d] post_fork for %s", os.getpid(), self)
+
+ with self._socket_lock:
+ if self.socket or self.telemetry_socket:
+ log.warning("Open socket detected after fork. Call pre_fork() before os.fork().")
+ self.close_socket()
+
+ self._forking = False
+
+ with self._config_lock:
+ self._start_flush_thread(self._flush_interval)
+ self._start_sender_thread()
+
+ def stop(self):
+ """Stop the client.
+
+ Disable buffering, background sender and flush any pending payloads to the server.
+
+ Client remains usable after this method, but sending metrics may block if socket_timeout is enabled.
+ """
+
+ self.disable_background_sender()
+ self.disable_buffering = True
+ self.flush()
+ self.close_socket()
+
+
+statsd = DogStatsd()
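
The `_serialize_metric` method above pins down the dogstatsd wire format: `[namespace.]metric:value|type[|@rate][|#tags][|c:container-id]`. A standalone sketch of the same datagram assembly (the function and names here are illustrative, not part of the vendored module):

```python
# Minimal sketch of the datagram format assembled by _serialize_metric above.
def serialize(metric, value, metric_type, namespace="",
              sample_rate=1, tags=None, container_id=None):
    return "%s%s:%s|%s%s%s%s" % (
        (namespace + ".") if namespace else "",
        metric,
        value,
        metric_type,
        ("|@" + str(sample_rate)) if sample_rate != 1 else "",
        ("|#" + ",".join(tags)) if tags else "",
        ("|c:" + container_id) if container_id else "",
    )

print(serialize("user.query.time", 120, "ms",
                namespace="app", sample_rate=0.5, tags=["env:dev"]))
# app.user.query.time:120|ms|@0.5|#env:dev
```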
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/container.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/container.py
new file mode 100644
index 0000000..fe2e71c
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/container.py
@@ -0,0 +1,57 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under
+# the BSD-3-Clause License. This product includes software developed at Datadog
+# (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+
+import errno
+import re
+
+
+class UnresolvableContainerID(Exception):
+ """
+ Unable to get container ID from cgroup.
+ """
+
+
+class ContainerID(object):
+ """
+    A reader class that retrieves the current container ID parsed from the cgroup file.
+
+ Returns:
+ object: ContainerID
+
+ Raises:
+ `NotImplementedError`: No proc filesystem is found (non-Linux systems)
+ `UnresolvableContainerID`: Unable to read the container ID
+ """
+
+ CGROUP_PATH = "/proc/self/cgroup"
+ UUID_SOURCE = r"[0-9a-f]{8}[-_][0-9a-f]{4}[-_][0-9a-f]{4}[-_][0-9a-f]{4}[-_][0-9a-f]{12}"
+ CONTAINER_SOURCE = r"[0-9a-f]{64}"
+ TASK_SOURCE = r"[0-9a-f]{32}-\d+"
+ LINE_RE = re.compile(r"^(\d+):([^:]*):(.+)$")
+ CONTAINER_RE = re.compile(r"(?:.+)?({0}|{1}|{2})(?:\.scope)?$".format(UUID_SOURCE, CONTAINER_SOURCE, TASK_SOURCE))
+
+ def __init__(self):
+ self.container_id = self._read_container_id(self.CGROUP_PATH)
+
+ def _read_container_id(self, fpath):
+ try:
+ with open(fpath, mode="r") as fp:
+ for line in fp:
+ line = line.strip()
+ match = self.LINE_RE.match(line)
+ if not match:
+ continue
+ _, _, path = match.groups()
+                    parts = path.split("/")
+                    if parts:
+ match = self.CONTAINER_RE.match(parts.pop())
+ if match:
+ return match.group(1)
+ except IOError as e:
+ if e.errno != errno.ENOENT:
+ raise NotImplementedError("Unable to open {}.".format(self.CGROUP_PATH))
+ except Exception as e:
+ raise UnresolvableContainerID("Unable to read the container ID: " + str(e))
+ return None
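
The `*_SOURCE` patterns cover the container ID shapes expected in `/proc/self/cgroup`: a UUID, a 64-hex-char Docker ID, or a task ID. A small sketch of the same two-stage match, against a made-up Docker-style cgroup line (only the 64-hex branch is shown for brevity):

```python
import re

# Same matching scheme as ContainerID above; the sample line is invented.
LINE_RE = re.compile(r"^(\d+):([^:]*):(.+)$")
CONTAINER_RE = re.compile(r"(?:.+)?([0-9a-f]{64})(?:\.scope)?$")

sample = "13:memory:/docker/" + "f" * 64
_, _, path = LINE_RE.match(sample).groups()
last = path.split("/").pop()              # the last path segment holds the ID
print(CONTAINER_RE.match(last).group(1))  # 64 x 'f'
```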
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/context.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/context.py
new file mode 100644
index 0000000..90e9ce9
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/context.py
@@ -0,0 +1,88 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+from functools import wraps
+
+try:
+ from time import monotonic # type: ignore[attr-defined]
+except ImportError:
+ from time import time as monotonic
+
+# datadog
+from datadog.dogstatsd.context_async import _get_wrapped_co
+from datadog.util.compat import iscoroutinefunction
+
+
+class TimedContextManagerDecorator(object):
+ """
+ A context manager and a decorator which will report the elapsed time in
+ the context OR in a function call.
+ """
+
+ def __init__(self, statsd, metric=None, tags=None, sample_rate=1, use_ms=None):
+ self.statsd = statsd
+ self.timing_func = statsd.timing
+ self.metric = metric
+ self.tags = tags
+ self.sample_rate = sample_rate
+ self.use_ms = use_ms
+ self.elapsed = None
+
+ def __call__(self, func):
+ """
+ Decorator which returns the elapsed time of the function call.
+
+        Defaults to the module and function name if a metric is not provided.
+ """
+ if not self.metric:
+ self.metric = "%s.%s" % (func.__module__, func.__name__)
+
+ # Coroutines
+ if iscoroutinefunction(func):
+ return _get_wrapped_co(self, func)
+
+ # Others
+ @wraps(func)
+ def wrapped(*args, **kwargs):
+ start = monotonic()
+ try:
+ return func(*args, **kwargs)
+ finally:
+ self._send(start)
+
+ return wrapped
+
+ def __enter__(self):
+ if not self.metric:
+            raise TypeError("Cannot use timed without a metric!")
+ self._start = monotonic()
+ return self
+
+ def __exit__(self, type, value, traceback):
+ # Report the elapsed time of the context manager.
+ self._send(self._start)
+
+ def _send(self, start):
+ elapsed = monotonic() - start
+ use_ms = self.use_ms if self.use_ms is not None else self.statsd.use_ms
+ elapsed = int(round(1000 * elapsed)) if use_ms else elapsed
+ self.timing_func(self.metric, elapsed, self.tags, self.sample_rate)
+ self.elapsed = elapsed
+
+ def start(self):
+ self.__enter__()
+
+ def stop(self):
+ self.__exit__(None, None, None)
+
+
+class DistributedContextManagerDecorator(TimedContextManagerDecorator):
+ """
+ A context manager and a decorator which will report the elapsed time in
+ the context OR in a function call using the custom distribution metric.
+ """
+
+ def __init__(self, statsd, metric=None, tags=None, sample_rate=1, use_ms=None):
+ super(DistributedContextManagerDecorator, self).__init__(statsd, metric, tags, sample_rate, use_ms)
+ self.timing_func = statsd.distribution
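
Besides `@statsd.timed(...)` and `with statsd.timed(...)`, the `start()`/`stop()` pair above lets a timed span cross lexical scopes. A minimal usage sketch, assuming the module-level `statsd` client defined earlier; the workload is a placeholder:

```python
import time
from datadog import statsd  # module-level DogStatsd client

def run_batch():              # placeholder workload
    time.sleep(0.05)

timer = statsd.timed("worker.batch.time", tags=["queue:default"])
timer.start()                 # __enter__: records the monotonic start time
run_batch()
timer.stop()                  # __exit__: reports the timing via statsd.timing
print(timer.elapsed)          # the reported value is also kept on the object
```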
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/context_async.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/context_async.py
new file mode 100644
index 0000000..d178d4e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/context_async.py
@@ -0,0 +1,52 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+"""
+Decorator `timed` for coroutine methods.
+
+Warning: requires Python 3.5 or higher.
+"""
+# stdlib
+import sys
+
+
+# Wrap the Python 3.5+ function in a docstring to avoid syntax errors when
+# running mypy in --py2 mode. Currently there is no way to have mypy skip an
+# entire file if it has syntax errors. This solution is very hacky; another
+# option is to specify the source files to process in mypy.ini (using glob
+# inclusion patterns), and omit this file from the list.
+#
+# https://stackoverflow.com/a/57023749/3776794
+# https://github.com/python/mypy/issues/6897
+ASYNC_SOURCE = r'''
+from functools import wraps
+try:
+ from time import monotonic
+except ImportError:
+ from time import time as monotonic
+
+
+def _get_wrapped_co(self, func):
+ """
+ `timed` wrapper for coroutine methods.
+ """
+ @wraps(func)
+ async def wrapped_co(*args, **kwargs):
+ start = monotonic()
+ try:
+ result = await func(*args, **kwargs)
+ return result
+ finally:
+ self._send(start)
+ return wrapped_co
+'''
+
+
+def _get_wrapped_co(self, func):
+ raise NotImplementedError(
+ u"Decorator `timed` compatibility with coroutine functions" u" requires Python 3.5 or higher."
+ )
+
+
+if sys.version_info >= (3, 5):
+ exec(compile(ASYNC_SOURCE, __file__, "exec"))
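
On Python 3.5+, the `exec` above swaps in the coroutine-aware wrapper, so `@statsd.timed` can sit directly on an `async def`. A short sketch (uses `asyncio.run`, so it assumes Python 3.7+):

```python
import asyncio
from datadog import statsd

@statsd.timed("handler.async.time", use_ms=True)
async def handler():
    await asyncio.sleep(0.1)   # awaited time is included in the reported timing

asyncio.run(handler())
```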
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/route.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/route.py
new file mode 100644
index 0000000..c3fe779
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/dogstatsd/route.py
@@ -0,0 +1,40 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+"""
+Helper(s), resolve the system's default interface.
+"""
+# stdlib
+import socket
+import struct
+
+
+class UnresolvableDefaultRoute(Exception):
+ """
+ Unable to resolve system's default route.
+ """
+
+
+def get_default_route():
+ """
+ Return the system default interface using the proc filesystem.
+
+ Returns:
+ string: default route
+
+ Raises:
+ `NotImplementedError`: No proc filesystem is found (non-Linux systems)
+ `StopIteration`: No default route found
+ """
+ try:
+ with open("/proc/net/route") as f:
+ for line in f.readlines():
+ fields = line.strip().split()
+ if fields[1] == "00000000":
+                    return socket.inet_ntoa(struct.pack("<L", int(fields[2], 16)))
+ should_flush = False
+ if should_flush:
+ _get_lambda_stats().flush(float("inf"))
+
+ def __call__(self, *args, **kw):
+ warnings.warn("datadog_lambda_wrapper() is relocated to https://git.io/fjy8o", DeprecationWarning)
+ _LambdaDecorator._enter()
+ try:
+ return self.func(*args, **kw)
+ finally:
+ _LambdaDecorator._close()
+
+
+_lambda_stats = None
+datadog_lambda_wrapper = _LambdaDecorator
+
+
+def _get_lambda_stats():
+ global _lambda_stats
+ # This is not thread-safe, it should be called first by _LambdaDecorator
+ if _lambda_stats is None:
+ _lambda_stats = ThreadStats()
+ _lambda_stats.start(flush_in_greenlet=False, flush_in_thread=False)
+ return _lambda_stats
+
+
+def lambda_metric(*args, **kw):
+ """ Alias to expose only distributions for lambda functions"""
+ _get_lambda_stats().distribution(*args, **kw)
+
+
+def _init_api_client():
+ """No-op GET to initialize the requests connection with DD's endpoints
+
+ The goal here is to make the final flush faster:
+ we keep alive the Requests session, this means that we can re-use the connection
+ The consequence is that the HTTP Handshake, which can take hundreds of ms,
+ is now made at the beginning of a lambda instead of at the end.
+
+ By making the initial request async, we spare a lot of execution time in the lambdas.
+ """
+ try:
+ api.api_client.APIClient.submit("GET", "validate")
+ except Exception:
+ pass
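
Taken together, `_enter`/`_close` reference-count nested wrappers so the aggregated distributions are flushed once, when the outermost wrapped handler returns. The (deprecated) usage pattern looks like this; the handler itself is hypothetical:

```python
from datadog import datadog_lambda_wrapper, lambda_metric

@datadog_lambda_wrapper            # flushes once, when the last open wrapper closes
def my_handler(event, context):    # hypothetical AWS Lambda handler
    lambda_metric("my_app.requests", 1, tags=["env:dev"])
    return {"statusCode": 200}
```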
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/base.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/base.py
new file mode 100644
index 0000000..b5e7699
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/base.py
@@ -0,0 +1,511 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+"""
+ThreadStats is a tool for collecting application metrics without hindering
+performance. It collects metrics in the application thread with very little overhead
+and allows flushing metrics in process, in a thread or in a greenlet, depending
+on your application's needs.
+"""
+import atexit
+import logging
+import os
+
+# stdlib
+from contextlib import contextmanager
+from functools import wraps
+from time import time
+
+try:
+ from time import monotonic # type: ignore[attr-defined]
+except ImportError:
+ from time import time as monotonic
+
+# datadog
+from datadog.api.exceptions import ApiNotInitialized
+from datadog.threadstats.constants import MetricType
+from datadog.threadstats.events import EventsAggregator
+from datadog.threadstats.metrics import MetricsAggregator, Counter, Gauge, Histogram, Timing, Distribution, Set
+from datadog.threadstats.reporters import HttpReporter
+
+# Loggers
+log = logging.getLogger("datadog.threadstats")
+
+DD_ENV_TAGS_MAPPING = {
+ "DD_ENV": "env",
+ "DD_SERVICE": "service",
+ "DD_VERSION": "version",
+}
+
+
+class ThreadStats(object):
+ def __init__(self, namespace="", constant_tags=None, compress_payload=False):
+ """
+ Initialize a threadstats object.
+
+ :param namespace: Namespace to prefix all metric names
+ :type namespace: string
+
+ :param constant_tags: Tags to attach to every metric reported by this client
+ :type constant_tags: list of strings
+
+ :param compress_payload: compress the payload using zlib
+ :type compress_payload: bool
+
+ :envvar DATADOG_TAGS: Tags to attach to every metric reported by ThreadStats client
+ :type DATADOG_TAGS: comma-delimited string
+
+ :envvar DD_ENV: the env of the service running the ThreadStats client.
+ If set, it is appended to the constant (global) tags of the client.
+ :type DD_ENV: string
+
+ :envvar DD_SERVICE: the name of the service running the ThreadStats client.
+ If set, it is appended to the constant (global) tags of the client.
+ :type DD_SERVICE: string
+
+ :envvar DD_VERSION: the version of the service running the ThreadStats client.
+ If set, it is appended to the constant (global) tags of the client.
+ :type DD_VERSION: string
+ """
+ # Parameters
+ self.namespace = namespace
+ env_tags = [tag for tag in os.environ.get("DATADOG_TAGS", "").split(",") if tag]
+ for var, tag_name in DD_ENV_TAGS_MAPPING.items():
+ value = os.environ.get(var, "")
+ if value:
+ env_tags.append("{name}:{value}".format(name=tag_name, value=value))
+ if constant_tags is None:
+ constant_tags = []
+ self.constant_tags = constant_tags + env_tags
+
+ # State
+ self._disabled = True
+ self.compress_payload = compress_payload
+
+ def start(
+ self,
+ flush_interval=10,
+ roll_up_interval=10,
+ device=None,
+ flush_in_thread=True,
+ flush_in_greenlet=False,
+ disabled=False,
+ ):
+ """
+ Start the ThreadStats instance with the specified metric flushing method and preferences.
+
+ By default, metrics will be flushed in a thread.
+
+ >>> stats.start()
+
+ If you're running a gevent server and want to flush metrics in a
+ greenlet, set *flush_in_greenlet* to True. Be sure to import and monkey
+ patch gevent before starting ThreadStats. ::
+
+ >>> from gevent import monkey; monkey.patch_all()
+ >>> stats.start(flush_in_greenlet=True)
+
+ If you'd like to flush metrics in process, set *flush_in_thread*
+ to False, though you'll have to call ``flush`` manually to post metrics
+ to the server. ::
+
+ >>> stats.start(flush_in_thread=False)
+
+        If, for whatever reason, you need to disable metrics collection in a
+ hurry, set ``disabled`` to True and metrics won't be collected or flushed.
+
+ >>> stats.start(disabled=True)
+
+        *Note:* Remember to set your API key first, using the datadog
+        module's ``initialize`` method.
+
+ >>> from datadog import initialize, ThreadStats
+ >>> initialize(api_key="my_api_key")
+ >>> stats = ThreadStats()
+ >>> stats.start()
+ >>> stats.increment("home.page.hits")
+
+ :param flush_interval: The number of seconds to wait between flushes.
+ :type flush_interval: int
+ :param flush_in_thread: True if you'd like to spawn a thread to flush metrics.
+ It will run every `flush_interval` seconds.
+ :type flush_in_thread: bool
+ :param flush_in_greenlet: Set to true if you'd like to flush in a gevent greenlet.
+ :type flush_in_greenlet: bool
+ :param disabled: Disable metrics collection
+ :type disabled: bool
+ """
+ self.flush_interval = flush_interval
+ self.roll_up_interval = roll_up_interval
+ self.device = device
+ self._disabled = disabled
+ self._is_auto_flushing = False
+
+ # Create an aggregator
+ self._metric_aggregator = MetricsAggregator(self.roll_up_interval)
+ self._event_aggregator = EventsAggregator()
+
+ # The reporter is responsible for sending metrics off to their final destination.
+ # It's abstracted to support easy unit testing and in the near future, forwarding
+ # to the datadog agent.
+ self.reporter = HttpReporter(compress_payload=self.compress_payload)
+
+ self._is_flush_in_progress = False
+ self.flush_count = 0
+ if self._disabled:
+ log.info("ThreadStats instance is disabled. No metrics will flush.")
+ else:
+ if flush_in_greenlet:
+ self._start_flush_greenlet()
+ elif flush_in_thread:
+ self._start_flush_thread()
+
+ # Flush all remaining metrics on exit
+ atexit.register(lambda: self.flush(float("inf")))
+
+ def stop(self):
+ if not self._is_auto_flushing:
+ return True
+ if self._flush_thread:
+ self._flush_thread.end()
+ self._is_auto_flushing = False
+ return True
+
+ def event(
+ self,
+ title,
+ message,
+ alert_type=None,
+ aggregation_key=None,
+ source_type_name=None,
+ date_happened=None,
+ priority=None,
+ tags=None,
+ hostname=None,
+ ):
+ """
+ Send an event. See http://docs.datadoghq.com/api/ for more info.
+
+ >>> stats.event("Man down!", "This server needs assistance.")
+ >>> stats.event("The web server restarted", \
+ "The web server is up again", alert_type="success")
+ """
+ if not self._disabled:
+ # Append all client level tags to every event
+ event_tags = tags
+ if self.constant_tags:
+ if tags:
+ event_tags = tags + self.constant_tags
+ else:
+ event_tags = self.constant_tags
+
+ self._event_aggregator.add_event(
+ title=title,
+ text=message,
+ alert_type=alert_type,
+ aggregation_key=aggregation_key,
+ source_type_name=source_type_name,
+ date_happened=date_happened,
+ priority=priority,
+ tags=event_tags,
+ host=hostname,
+ )
+
+ def gauge(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, host=None):
+ """
+ Record the current ``value`` of a metric. The most recent value in
+ a given flush interval will be recorded. Optionally, specify a set of
+ tags to associate with the metric. This should be used for sum values
+ such as total hard disk space, process uptime, total number of active
+ users, or number of rows in a database table.
+
+ >>> stats.gauge("process.uptime", time.time() - process_start_time)
+ >>> stats.gauge("cache.bytes.free", cache.get_free_bytes(), tags=["version:1.0"])
+ """
+ if not self._disabled:
+ self._metric_aggregator.add_point(
+ metric_name, tags, timestamp or time(), value, Gauge, sample_rate=sample_rate, host=host
+ )
+
+ def set(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, host=None):
+ """
+ Add ``value`` to the current set. The length of the set is
+ flushed as a gauge to Datadog. Optionally, specify a set of
+ tags to associate with the metric.
+
+ >>> stats.set("example_metric.set", "value_1", tags=["environment:dev"])
+ """
+ if not self._disabled:
+ self._metric_aggregator.add_point(
+ metric_name, tags, timestamp or time(), value, Set, sample_rate=sample_rate, host=host
+ )
+
+ def increment(self, metric_name, value=1, timestamp=None, tags=None, sample_rate=1, host=None):
+ """
+ Increment the counter by the given ``value``. Optionally, specify a list of
+ ``tags`` to associate with the metric. This is useful for counting things
+ such as incrementing a counter each time a page is requested.
+
+ >>> stats.increment('home.page.hits')
+ >>> stats.increment('bytes.processed', file.size())
+ """
+ if not self._disabled:
+ self._metric_aggregator.add_point(
+ metric_name, tags, timestamp or time(), value, Counter, sample_rate=sample_rate, host=host
+ )
+
+ def decrement(self, metric_name, value=1, timestamp=None, tags=None, sample_rate=1, host=None):
+ """
+ Decrement a counter, optionally setting a value, tags and a sample
+ rate.
+
+ >>> stats.decrement("files.remaining")
+ >>> stats.decrement("active.connections", 2)
+ """
+ if not self._disabled:
+ self._metric_aggregator.add_point(
+ metric_name, tags, timestamp or time(), -value, Counter, sample_rate=sample_rate, host=host
+ )
+
+ def histogram(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, host=None):
+ """
+ Sample a histogram value. Histograms will produce metrics that
+ describe the distribution of the recorded values, namely the maximum, minimum,
+ average, count and the 75/85/95/99 percentiles. Optionally, specify
+ a list of ``tags`` to associate with the metric.
+
+ >>> stats.histogram("uploaded_file.size", uploaded_file.size())
+ """
+ if not self._disabled:
+ self._metric_aggregator.add_point(
+ metric_name, tags, timestamp or time(), value, Histogram, sample_rate=sample_rate, host=host
+ )
+
+ def distribution(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, host=None):
+ """
+ Sample a distribution value. Distributions will produce metrics that
+ describe the distribution of the recorded values, namely the maximum,
+ median, average, count and the 50/75/90/95/99 percentiles. Optionally,
+ specify a list of ``tags`` to associate with the metric.
+
+ >>> stats.distribution("uploaded_file.size", uploaded_file.size())
+ """
+ if not self._disabled:
+ self._metric_aggregator.add_point(
+ metric_name, tags, timestamp or time(), value, Distribution, sample_rate=sample_rate, host=host
+ )
+
+ def timing(self, metric_name, value, timestamp=None, tags=None, sample_rate=1, host=None):
+ """
+ Record a timing, optionally setting tags and a sample rate.
+
+ >>> stats.timing("query.response.time", 1234)
+ """
+ if not self._disabled:
+ self._metric_aggregator.add_point(
+ metric_name, tags, timestamp or time(), value, Timing, sample_rate=sample_rate, host=host
+ )
+
+ @contextmanager
+ def timer(self, metric_name, sample_rate=1, tags=None, host=None):
+ """
+ A context manager that will track the distribution of the contained code's run time.
+ Optionally specify a list of tags to associate with the metric.
+ ::
+
+ def get_user(user_id):
+ with stats.timer("user.query.time"):
+ # Do what you need to ...
+ pass
+
+ # Is equivalent to ...
+ def get_user(user_id):
+ start = time.time()
+ try:
+ # Do what you need to ...
+ pass
+ finally:
+ stats.histogram("user.query.time", time.time() - start)
+ """
+ start = monotonic()
+ try:
+ yield
+ finally:
+ end = monotonic()
+ self.timing(metric_name, end - start, time(), tags=tags, sample_rate=sample_rate, host=host)
+
+ def timed(self, metric_name, sample_rate=1, tags=None, host=None):
+ """
+ A decorator that will track the distribution of a function's run time.
+ Optionally specify a list of tags to associate with the metric.
+ ::
+
+ @stats.timed("user.query.time")
+ def get_user(user_id):
+ # Do what you need to ...
+ pass
+
+ # Is equivalent to ...
+ start = time.time()
+ try:
+ get_user(user_id)
+ finally:
+ stats.histogram("user.query.time", time.time() - start)
+ """
+
+ def wrapper(func):
+ @wraps(func)
+ def wrapped(*args, **kwargs):
+ with self.timer(metric_name, sample_rate, tags, host):
+ result = func(*args, **kwargs)
+ return result
+
+ return wrapped
+
+ return wrapper
+
+ def flush(self, timestamp=None):
+ """
+ Flush and post all metrics to the server. Note that this is a blocking
+ call, so it is likely not suitable for user facing processes. In those
+ cases, it's probably best to flush in a thread or greenlet.
+ """
+ try:
+ if self._is_flush_in_progress:
+ log.debug("A flush is already in progress. Skipping this one.")
+ return False
+ if self._disabled:
+ log.info("Not flushing because we're disabled.")
+ return False
+
+ self._is_flush_in_progress = True
+
+ # Process metrics
+ metrics, dists = self._get_aggregate_metrics_and_dists(timestamp or time())
+ count_metrics = len(metrics)
+ if count_metrics:
+ self.flush_count += 1
+ log.debug("Flush #%s sending %s metrics" % (self.flush_count, count_metrics))
+ self.reporter.flush_metrics(metrics)
+ else:
+ log.debug("No metrics to flush. Continuing.")
+
+ count_dists = len(dists)
+ if count_dists:
+ self.flush_count += 1
+ log.debug("Flush #%s sending %s distributions" % (self.flush_count, count_dists))
+ self.reporter.flush_distributions(dists)
+ else:
+ log.debug("No distributions to flush. Continuing.")
+
+ # Process events
+ events = self._get_aggregate_events()
+ count_events = len(events)
+ if count_events:
+ self.flush_count += 1
+ log.debug("Flush #%s sending %s events" % (self.flush_count, count_events))
+ self.reporter.flush_events(events)
+ else:
+ log.debug("No events to flush. Continuing.")
+ except ApiNotInitialized:
+ raise
+ except Exception:
+ try:
+ log.exception("Error flushing metrics and events")
+ except Exception:
+ pass
+ finally:
+ self._is_flush_in_progress = False
+
+ def _get_aggregate_metrics_and_dists(self, flush_time=None):
+ """
+ Get, format and return the rolled up metrics from the aggregator.
+ """
+ # Get rolled up metrics
+ rolled_up_metrics = self._metric_aggregator.flush(flush_time)
+
+ # FIXME: emit a dictionary from the aggregator
+ metrics = []
+ dists = []
+ for timestamp, value, name, tags, host, metric_type, interval in rolled_up_metrics:
+ metric_tags = tags
+ metric_name = name
+
+ # Append all client level tags to every metric
+ if self.constant_tags:
+ if tags:
+ metric_tags = tags + self.constant_tags
+ else:
+ metric_tags = self.constant_tags
+
+ # Resolve the metric name
+ if self.namespace:
+ metric_name = self.namespace + "." + name
+
+ metric = {
+ "metric": metric_name,
+ "points": [[timestamp, value]],
+ "type": metric_type,
+ "host": host,
+ "device": self.device,
+ "tags": metric_tags,
+ "interval": interval,
+ }
+ if metric_type == MetricType.Distribution:
+ dists.append(metric)
+ else:
+ metrics.append(metric)
+ return (metrics, dists)
+
+ def _get_aggregate_events(self):
+ # Get events
+ events = self._event_aggregator.flush()
+ return events
+
+ def _start_flush_thread(self):
+ """ Start a thread to flush metrics. """
+ from datadog.threadstats.periodic_timer import PeriodicTimer
+
+ if self._is_auto_flushing:
+ log.info("Autoflushing already started.")
+ return
+ self._is_auto_flushing = True
+
+ # A small helper for logging and flushing.
+ def flush():
+ try:
+ log.debug("Flushing metrics in thread")
+ self.flush()
+ except Exception:
+ try:
+ log.exception("Error flushing in thread")
+ except Exception:
+ pass
+
+ log.info("Starting flush thread with interval %s." % self.flush_interval)
+ self._flush_thread = PeriodicTimer(self.flush_interval, flush)
+ self._flush_thread.start()
+
+ def _start_flush_greenlet(self):
+ if self._is_auto_flushing:
+ log.info("Autoflushing already started.")
+ return
+ self._is_auto_flushing = True
+
+ import gevent
+
+ # A small helper for flushing.
+ def flush():
+ while True:
+ try:
+ log.debug("Flushing metrics in greenlet")
+ self.flush()
+ gevent.sleep(self.flush_interval)
+ except Exception:
+ try:
+ log.exception("Error flushing in greenlet")
+ except Exception:
+ pass
+
+ log.info("Starting flush greenlet with interval %s." % self.flush_interval)
+ gevent.spawn(flush)
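
For completeness, a minimal end-to-end sketch of the in-process mode described in `start()`: no background thread, explicit `flush()`. The API key is a placeholder:

```python
from datadog import initialize, ThreadStats

initialize(api_key="<your-api-key>")      # placeholder key
stats = ThreadStats(namespace="my_app", constant_tags=["env:dev"])
stats.start(flush_in_thread=False)        # no background flush thread

stats.increment("home.page.hits")
stats.gauge("queue.depth", 42)
stats.flush()                             # blocking HTTP post via HttpReporter
```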
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/constants.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/constants.py
new file mode 100644
index 0000000..63b565d
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/constants.py
@@ -0,0 +1,18 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+
+
+class MetricType(object):
+ Gauge = "gauge"
+ Counter = "counter"
+ Histogram = "histogram"
+ Rate = "rate"
+ Distribution = "distribution"
+
+
+class MonitorType(object):
+ SERVICE_CHECK = "service check"
+ METRIC_ALERT = "metric alert"
+ QUERY_ALERT = "query alert"
+ ALL = (SERVICE_CHECK, METRIC_ALERT, QUERY_ALERT)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/events.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/events.py
new file mode 100644
index 0000000..a85c798
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/events.py
@@ -0,0 +1,27 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+"""
+Event aggregator class.
+"""
+
+from datadog.util.compat import iteritems
+
+
+class EventsAggregator(object):
+ """
+ A simple event aggregator
+ """
+
+ def __init__(self):
+ self._events = []
+
+ def add_event(self, **event):
+ # Clean empty values
+ event = dict((k, v) for k, v in iteritems(event) if v is not None)
+ self._events.append(event)
+
+ def flush(self):
+ events = self._events
+ self._events = []
+ return events
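
The aggregator is deliberately simple: `add_event` strips `None` fields, and `flush` hands back (and empties) the buffer:

```python
from datadog.threadstats.events import EventsAggregator

agg = EventsAggregator()
agg.add_event(title="Deploy", text="v1.2.3 shipped", priority=None)  # None dropped
print(agg.flush())  # [{'title': 'Deploy', 'text': 'v1.2.3 shipped'}]
print(agg.flush())  # [] -- flushing empties the buffer
```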
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/metrics.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/metrics.py
new file mode 100644
index 0000000..aa9fef5
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/metrics.py
@@ -0,0 +1,203 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+"""
+Metric roll-up classes.
+"""
+from collections import defaultdict
+import random
+import itertools
+import threading
+
+from datadog.util.compat import iternext
+from datadog.threadstats.constants import MetricType
+
+
+class Metric(object):
+ """
+ A base metric class that accepts points, slices them into time intervals
+ and performs roll-ups within those intervals.
+ """
+
+ def add_point(self, value):
+ """ Add a point to the given metric. """
+ raise NotImplementedError()
+
+ def flush(self, timestamp, interval):
+ """ Flush all metrics up to the given timestamp. """
+ raise NotImplementedError()
+
+
+class Set(Metric):
+ """ A set metric. """
+
+ stats_tag = "g"
+
+ def __init__(self, name, tags, host):
+ self.name = name
+ self.tags = tags
+ self.host = host
+ self.set = set()
+
+ def add_point(self, value):
+ self.set.add(value)
+
+ def flush(self, timestamp, interval):
+ return [(timestamp, len(self.set), self.name, self.tags, self.host, MetricType.Gauge, interval)]
+
+
+class Gauge(Metric):
+ """ A gauge metric. """
+
+ stats_tag = "g"
+
+ def __init__(self, name, tags, host):
+ self.name = name
+ self.tags = tags
+ self.host = host
+ self.value = None
+
+ def add_point(self, value):
+ self.value = value
+
+ def flush(self, timestamp, interval):
+ return [(timestamp, self.value, self.name, self.tags, self.host, MetricType.Gauge, interval)]
+
+
+class Counter(Metric):
+ """ A metric that tracks a counter value. """
+
+ stats_tag = "c"
+
+ def __init__(self, name, tags, host):
+ self.name = name
+ self.tags = tags
+ self.host = host
+ self.count = []
+
+ def add_point(self, value):
+ self.count.append(value)
+
+ def flush(self, timestamp, interval):
+ count = sum(self.count, 0)
+ return [(timestamp, count / float(interval), self.name, self.tags, self.host, MetricType.Rate, interval)]
+
+
+class Distribution(Metric):
+ """ A distribution metric. """
+
+ stats_tag = "d"
+
+ def __init__(self, name, tags, host):
+ self.name = name
+ self.tags = tags
+ self.host = host
+ self.value = []
+
+ def add_point(self, value):
+ self.value.append(value)
+
+ def flush(self, timestamp, interval):
+ return [(timestamp, self.value, self.name, self.tags, self.host, MetricType.Distribution, interval)]
+
+
+class Histogram(Metric):
+ """ A histogram metric. """
+
+ stats_tag = "h"
+
+ def __init__(self, name, tags, host):
+ self.name = name
+ self.tags = tags
+ self.host = host
+ self.max = float("-inf")
+ self.min = float("inf")
+ self.sum = []
+ self.iter_counter = itertools.count()
+ self.count = iternext(self.iter_counter)
+ self.sample_size = 1000
+ self.samples = []
+ self.percentiles = [0.75, 0.85, 0.95, 0.99]
+
+ def add_point(self, value):
+ self.max = self.max if self.max > value else value
+ self.min = self.min if self.min < value else value
+ self.sum.append(value)
+ if self.count < self.sample_size:
+ self.samples.append(value)
+ else:
+ self.samples[random.randrange(0, self.sample_size)] = value
+ self.count = iternext(self.iter_counter)
+
+ def flush(self, timestamp, interval):
+ if not self.count:
+ return []
+ metrics = [
+ (timestamp, self.min, "%s.min" % self.name, self.tags, self.host, MetricType.Gauge, interval),
+ (timestamp, self.max, "%s.max" % self.name, self.tags, self.host, MetricType.Gauge, interval),
+ (
+ timestamp,
+ self.count / float(interval),
+ "%s.count" % self.name,
+ self.tags,
+ self.host,
+ MetricType.Rate,
+ interval,
+ ),
+ (timestamp, self.average(), "%s.avg" % self.name, self.tags, self.host, MetricType.Gauge, interval),
+ ]
+ length = len(self.samples)
+ self.samples.sort()
+ for p in self.percentiles:
+ val = self.samples[int(round(p * length - 1))]
+ name = "%s.%spercentile" % (self.name, int(p * 100))
+ metrics.append((timestamp, val, name, self.tags, self.host, MetricType.Gauge, interval))
+ return metrics
+
+ def average(self):
+ sum_metrics = sum(self.sum, 0)
+ return float(sum_metrics) / self.count
+
+
+class Timing(Histogram):
+ """
+ A timing metric.
+    Inherits from Histogram as a workaround to support timings in API mode.
+ """
+
+ stats_tag = "ms"
+
+
+class MetricsAggregator(object):
+ """
+ A small class to handle the roll-ups of multiple metrics at once.
+ """
+
+ def __init__(self, roll_up_interval=10):
+ self._lock = threading.RLock()
+ self._metrics = defaultdict(lambda: {})
+ self._roll_up_interval = roll_up_interval
+
+ def add_point(self, metric, tags, timestamp, value, metric_class, sample_rate=1, host=None):
+ # The sample rate is currently ignored for in process stuff
+ interval = timestamp - timestamp % self._roll_up_interval
+ key = (metric, host, tuple(sorted(tags)) if tags else None)
+ with self._lock:
+ if key not in self._metrics[interval]:
+ self._metrics[interval][key] = metric_class(metric, tags, host)
+ self._metrics[interval][key].add_point(value)
+
+ def flush(self, timestamp):
+ """ Flush all metrics up to the given timestamp. """
+ if timestamp == float("inf"):
+ interval = float("inf")
+ else:
+ interval = timestamp - timestamp % self._roll_up_interval
+
+ with self._lock:
+ past_intervals = [i for i in self._metrics.keys() if i < interval]
+ metrics = []
+ for i in past_intervals:
+ for m in list(self._metrics.pop(i).values()):
+ metrics += m.flush(i, self._roll_up_interval)
+ return metrics
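
A sketch of the roll-up behaviour: points sharing a metric/host/tags key within one `roll_up_interval` collapse into a single flushed tuple, and counters flush as rates over the interval:

```python
import time
from datadog.threadstats.metrics import MetricsAggregator, Counter, Gauge

agg = MetricsAggregator(roll_up_interval=10)
now = time.time()

agg.add_point("requests", ["env:dev"], now, 1, Counter)
agg.add_point("requests", ["env:dev"], now, 1, Counter)  # same interval and key
agg.add_point("queue.depth", None, now, 42, Gauge)

# flush(inf) drains every interval; the Counter arrives as a rate: 2 / 10 = 0.2
for point in agg.flush(float("inf")):
    print(point)  # (timestamp, value, name, tags, host, metric_type, interval)
```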
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/periodic_timer.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/periodic_timer.py
new file mode 100644
index 0000000..ff4b583
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/periodic_timer.py
@@ -0,0 +1,36 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+"""
+A small class to run a task periodically in a thread.
+"""
+
+
+from threading import Thread, Event
+import sys
+
+
+class PeriodicTimer(Thread):
+ def __init__(self, interval, function, *args, **kwargs):
+ Thread.__init__(self)
+ self.daemon = True
+ assert interval > 0
+ self.interval = interval
+ assert function
+ self.function = function
+ self.args = args
+ self.kwargs = kwargs
+ self.finished = Event()
+
+ def end(self):
+ self.finished.set()
+
+ def run(self):
+ while not self.finished.wait(self.interval):
+ try:
+ self.function(*self.args, **self.kwargs)
+ except Exception:
+ # If `sys` is None, it means the interpreter is shutting down
+ # and it's very likely the reason why we got an exception.
+ if sys is not None:
+ raise
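
Usage is plain `threading.Thread` plus an `Event`-based cancel: `end()` sets the event, so the next `wait()` returns True and the loop exits. A small sketch:

```python
import time
from datadog.threadstats.periodic_timer import PeriodicTimer

def heartbeat():
    print("tick")

timer = PeriodicTimer(1, heartbeat)  # daemon thread, fires roughly every second
timer.start()
time.sleep(3.5)                      # let it tick a few times
timer.end()                          # the loop exits on the next wait()
```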
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/reporters.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/reporters.py
new file mode 100644
index 0000000..1324794
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/threadstats/reporters.py
@@ -0,0 +1,34 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+"""
+Reporter classes.
+"""
+
+
+from datadog import api
+
+
+class Reporter(object):
+ def flush(self, metrics):
+ raise NotImplementedError()
+
+
+class HttpReporter(Reporter):
+ def __init__(self, compress_payload=False):
+ self.compress_payload = compress_payload
+
+ def flush_distributions(self, distributions):
+ api.Distribution.send(distributions, compress_payload=self.compress_payload)
+
+ def flush_metrics(self, metrics):
+ api.Metric.send(metrics, compress_payload=self.compress_payload)
+
+ def flush_events(self, events):
+ for event in events:
+ api.Event.create(**event)
+
+
+class GraphiteReporter(Reporter):
+ def flush(self, metrics):
+ pass
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/util/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/__init__.py
new file mode 100644
index 0000000..b3017a1
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/__init__.py
@@ -0,0 +1,3 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/util/cli.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/cli.py
new file mode 100644
index 0000000..f309980
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/cli.py
@@ -0,0 +1,152 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+from datetime import datetime, timedelta
+from argparse import ArgumentTypeError
+import json
+import re
+from datadog.util.format import force_to_epoch_seconds
+import time
+
+
+def comma_list(list_str, item_func=None):
+ if not list_str:
+ raise ArgumentTypeError("Invalid comma list")
+ item_func = item_func or (lambda i: i)
+ return [item_func(i.strip()) for i in list_str.split(",") if i.strip()]
+
+
+def comma_set(list_str, item_func=None):
+ return set(comma_list(list_str, item_func=item_func))
+
+
+def comma_list_or_empty(list_str):
+ if not list_str:
+ return []
+ else:
+ return comma_list(list_str)
+
+
+def list_of_ints(int_csv):
+ if not int_csv:
+ raise ArgumentTypeError("Invalid list of ints")
+ try:
+ # Try as a [1, 2, 3] list
+ j = json.loads(int_csv)
+ if isinstance(j, (list, set)):
+ j = [int(i) for i in j]
+ return j
+ except Exception:
+ pass
+
+ try:
+ return [int(i.strip()) for i in int_csv.strip().split(",")]
+ except Exception:
+ raise ArgumentTypeError("Invalid list of ints: {0}".format(int_csv))
+
+
+def list_of_ints_and_strs(csv):
+ def int_or_str(item):
+ try:
+ return int(item)
+ except ValueError:
+ return item
+
+ return comma_list(csv, int_or_str)
+
+
+def set_of_ints(int_csv):
+ return set(list_of_ints(int_csv))
+
+
+class DateParsingError(Exception):
+ """Thrown if parse_date exhausts all possible parsings of a string"""
+
+
+_date_fieldre = re.compile(r"(\d+)\s?(\w+) (ago|ahead)")
+
+
+def _midnight():
+ """ Truncate a date to midnight. Default to UTC midnight today."""
+ return datetime.utcnow().replace(hour=0, minute=0, second=0, microsecond=0)
+
+
+def parse_date_as_epoch_timestamp(date_str):
+ return parse_date(date_str, to_epoch_ts=True)
+
+
+def _parse_date_noop_formatter(d):
+ """ NOOP - only here for pylint """
+ return d
+
+
+def parse_date(date_str, to_epoch_ts=False):
+ formatter = _parse_date_noop_formatter
+ if to_epoch_ts:
+ formatter = force_to_epoch_seconds
+
+ if isinstance(date_str, datetime):
+ return formatter(date_str)
+ elif isinstance(date_str, time.struct_time):
+ return formatter(datetime.fromtimestamp(time.mktime(date_str)))
+
+ # Parse relative dates.
+ if date_str == "today":
+ return formatter(_midnight())
+ elif date_str == "yesterday":
+ return formatter(_midnight() - timedelta(days=1))
+ elif date_str == "tomorrow":
+ return formatter(_midnight() + timedelta(days=1))
+ elif date_str.endswith(("ago", "ahead")):
+ m = _date_fieldre.match(date_str)
+ if m:
+ fields = m.groups()
+ else:
+ fields = date_str.split(" ")[1:]
+ num = int(fields[0])
+ short_unit = fields[1]
+ time_direction = {"ago": -1, "ahead": 1}[fields[2]]
+ assert short_unit, short_unit
+ units = ["weeks", "days", "hours", "minutes", "seconds"]
+ # translate 'h' -> 'hours'
+ short_units = dict([(u[:1], u) for u in units])
+ unit = short_units.get(short_unit, short_unit)
+ # translate 'hour' -> 'hours'
+ if unit[-1] != "s":
+ unit += "s" # tolerate 1 hour
+ assert unit in units, "'%s' not in %s" % (unit, units)
+ return formatter(datetime.utcnow() + time_direction * timedelta(**{unit: num}))
+ elif date_str == "now":
+ return formatter(datetime.utcnow())
+
+ def _from_epoch_timestamp(seconds):
+ print("_from_epoch_timestamp({})".format(seconds))
+ return datetime.utcfromtimestamp(float(seconds))
+
+ def _from_epoch_ms_timestamp(millis):
+ print("_from_epoch_ms_timestamp({})".format(millis))
+ in_sec = float(millis) / 1000.0
+ print("_from_epoch_ms_timestamp({}) -> {}".format(millis, in_sec))
+ return _from_epoch_timestamp(in_sec)
+
+ # Or parse date formats (most specific to least specific)
+ parse_funcs = [
+ lambda d: datetime.strptime(d, "%Y-%m-%d %H:%M:%S.%f"),
+ lambda d: datetime.strptime(d, "%Y-%m-%d %H:%M:%S"),
+ lambda d: datetime.strptime(d, "%Y-%m-%dT%H:%M:%S.%f"),
+ lambda d: datetime.strptime(d, "%Y-%m-%dT%H:%M:%S"),
+ lambda d: datetime.strptime(d, "%Y-%m-%d %H:%M"),
+ lambda d: datetime.strptime(d, "%Y-%m-%d-%H"),
+ lambda d: datetime.strptime(d, "%Y-%m-%d"),
+ lambda d: datetime.strptime(d, "%Y-%m"),
+ lambda d: datetime.strptime(d, "%Y"),
+ _from_epoch_timestamp, # an epoch in seconds
+ _from_epoch_ms_timestamp, # an epoch in milliseconds
+ ]
+
+ for parse_func in parse_funcs:
+ try:
+ return formatter(parse_func(date_str))
+ except Exception:
+ pass
+ raise DateParsingError(u"Could not parse {0} as date".format(date_str))
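
`parse_date` tries relative phrases first, then the `strptime` formats, then epoch timestamps. A few representative calls:

```python
from datadog.util.cli import parse_date, parse_date_as_epoch_timestamp

print(parse_date("2 hours ago"))              # utcnow() minus a timedelta
print(parse_date("yesterday"))                # UTC midnight, one day back
print(parse_date("2024-01-02 03:04"))         # matched by "%Y-%m-%d %H:%M"
print(parse_date_as_epoch_timestamp("now"))   # formatted as epoch seconds
```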
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/util/compat.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/compat.py
new file mode 100644
index 0000000..58927d1
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/compat.py
@@ -0,0 +1,135 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# flake8: noqa
+"""
+Imports for compatibility with Python 2, Python 3 and Google App Engine.
+"""
+from functools import wraps
+import logging
+import socket
+import sys
+
+# Logging
+log = logging.getLogger("datadog.util")
+
+# Note: using `sys.version_info` instead of the helper functions defined here
+# so that mypy detects version-specific code paths. Currently, mypy doesn't
+# support try/except imports for version-specific code paths either.
+#
+# https://mypy.readthedocs.io/en/stable/common_issues.html#python-version-and-system-platform-checks
+
+# Python 3.x
+if sys.version_info[0] >= 3:
+ import builtins
+ from collections import UserDict as IterableUserDict
+ import configparser
+ from configparser import ConfigParser
+ from io import StringIO
+ from urllib.parse import urljoin, urlparse
+ import urllib.request as url_lib, urllib.error, urllib.parse
+
+ imap = map
+ get_input = input
+ text = str
+
+ def iteritems(d):
+ return iter(d.items())
+
+ def iternext(iter):
+ return next(iter)
+
+
+# Python 2.x
+else:
+ import __builtin__ as builtins
+ import ConfigParser as configparser
+    from ConfigParser import ConfigParser
+ from cStringIO import StringIO
+ from itertools import imap
+ import urllib2 as url_lib
+ from urlparse import urljoin, urlparse
+ from UserDict import IterableUserDict
+
+ get_input = raw_input
+ text = unicode
+
+ def iteritems(d):
+ return d.iteritems()
+
+ def iternext(iter):
+ return iter.next()
+
+
+# Python >= 3.5
+if sys.version_info >= (3, 5):
+ from asyncio import iscoroutinefunction
+# Others
+else:
+
+ def iscoroutinefunction(*args, **kwargs):
+ return False
+
+
+# Python >= 2.7
+if sys.version_info >= (2, 7):
+ from logging import NullHandler
+# Python 2.6.x
+else:
+ from logging import Handler
+
+ class NullHandler(Handler):
+ def emit(self, record):
+ pass
+
+
+def _is_py_version_higher_than(major, minor=0):
+ """
+    Return whether the Python version is at least `$maj.$min`.
+ """
+ return sys.version_info >= (major, minor)
+
+
+def is_p3k():
+ """
+ Assert that Python is version 3 or higher.
+ """
+ return _is_py_version_higher_than(3)
+
+
+def is_higher_py32():
+ """
+ Assert that Python is version 3.2 or higher.
+ """
+ return _is_py_version_higher_than(3, 2)
+
+
+def is_higher_py35():
+ """
+ Assert that Python is version 3.5 or higher.
+ """
+ return _is_py_version_higher_than(3, 5)
+
+
+def is_pypy():
+ """
+ Assert that PyPy is being used (regardless of 2 or 3)
+ """
+ return "__pypy__" in sys.builtin_module_names
+
+
+def conditional_lru_cache(func):
+ """
+ A decorator that conditionally enables a lru_cache of size 512 if
+ the version of Python can support it (>3.2) and otherwise returns
+ the original function
+ """
+ if not is_higher_py32():
+ return func
+
+ log.debug("Enabling LRU cache for function %s", func.__name__)
+
+ # pylint: disable=import-outside-toplevel
+ from functools import lru_cache
+
+ return lru_cache(maxsize=512)(func)
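
`conditional_lru_cache` degrades gracefully: on Python 3.2 and older the function comes back untouched, otherwise it gains a 512-entry LRU cache:

```python
from datadog.util.compat import conditional_lru_cache

@conditional_lru_cache
def normalize_tag(tag):
    return tag.strip().lower()

print(normalize_tag("  ENV:DEV "))  # "env:dev"; repeat calls hit the cache on Python 3
```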
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/util/config.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/config.py
new file mode 100644
index 0000000..cd186bc
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/config.py
@@ -0,0 +1,148 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+import os
+import string
+import sys
+
+# datadog
+from datadog.util.compat import configparser, StringIO, is_p3k
+from datadog.version import __version__
+
+# CONSTANTS
+DATADOG_CONF = "datadog.conf"
+
+
+class CfgNotFound(Exception):
+ pass
+
+
+class PathNotFound(Exception):
+ pass
+
+
+def get_os():
+ "Human-friendly OS name"
+ if sys.platform == "darwin":
+ return "mac"
+ elif sys.platform.find("freebsd") != -1:
+ return "freebsd"
+ elif sys.platform.find("linux") != -1:
+ return "linux"
+ elif sys.platform.find("win32") != -1:
+ return "windows"
+ elif sys.platform.find("sunos") != -1:
+ return "solaris"
+ else:
+ return sys.platform
+
+
+def skip_leading_wsp(f):
+ "Works on a file, returns a file-like object"
+ if is_p3k():
+ return StringIO("\n".join(x.strip(" ") for x in f.readlines()))
+ else:
+ return StringIO("\n".join(map(string.strip, f.readlines())))
+
+
+def _windows_commondata_path():
+ """Return the common appdata path, using ctypes
+ From http://stackoverflow.com/questions/626796/\
+ how-do-i-find-the-windows-common-application-data-folder-using-python
+ """
+ import ctypes
+ from ctypes import wintypes, windll
+
+ CSIDL_COMMON_APPDATA = 35
+
+ _SHGetFolderPath = windll.shell32.SHGetFolderPathW
+ _SHGetFolderPath.argtypes = [wintypes.HWND, ctypes.c_int, wintypes.HANDLE, wintypes.DWORD, wintypes.LPCWSTR]
+
+ path_buf = ctypes.create_unicode_buffer(wintypes.MAX_PATH)
+ _SHGetFolderPath(0, CSIDL_COMMON_APPDATA, 0, 0, path_buf)
+ return path_buf.value
+
+
+def _windows_config_path():
+ common_data = _windows_commondata_path()
+ path = os.path.join(common_data, "Datadog", DATADOG_CONF)
+ if os.path.exists(path):
+ return path
+ raise PathNotFound(path)
+
+
+def _unix_config_path():
+ path = os.path.join("/etc/dd-agent", DATADOG_CONF)
+ if os.path.exists(path):
+ return path
+ raise PathNotFound(path)
+
+
+def _mac_config_path():
+ path = os.path.join("~/.datadog-agent/agent", DATADOG_CONF)
+ path = os.path.expanduser(path)
+ if os.path.exists(path):
+ return path
+ raise PathNotFound(path)
+
+
+def get_config_path(cfg_path=None, os_name=None):
+ # Check if there's an override and if it exists
+ if cfg_path is not None and os.path.exists(cfg_path):
+ return cfg_path
+
+ if os_name is None:
+ os_name = get_os()
+
+ # Check for an OS-specific path, continue on not-found exceptions
+ if os_name == "windows":
+ return _windows_config_path()
+ elif os_name == "mac":
+ return _mac_config_path()
+ else:
+ return _unix_config_path()
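+
+# Note (illustrative): on a Linux host this resolves to
+# "/etc/dd-agent/datadog.conf" when the file exists; a missing file raises
+# PathNotFound, which get_config() below turns into CfgNotFound.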
+
+
+def get_config(cfg_path=None, options=None):
+ agentConfig = {}
+
+ # Config handling
+ try:
+ # Find the right config file
+ config_path = get_config_path(cfg_path, os_name=get_os())
+ config = configparser.ConfigParser()
+ with open(config_path) as config_file:
+ if is_p3k():
+ config.read_file(skip_leading_wsp(config_file))
+ else:
+ config.readfp(skip_leading_wsp(config_file))
+
+ # bulk import
+ for option in config.options("Main"):
+ agentConfig[option] = config.get("Main", option)
+
+ except Exception:
+ raise CfgNotFound
+
+ return agentConfig
+
+
+def get_pkg_version():
+ """
+ Resolve `datadog` package version.
+
+ Deprecated: use `datadog.__version__` directly instead
+ """
+ return __version__
+
+
+def get_version():
+ """
+ Resolve `datadog` package version.
+
+ Deprecated: use `datadog.__version__` directly instead
+ """
+ return __version__
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/util/deprecation.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/deprecation.py
new file mode 100644
index 0000000..57673ef
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/deprecation.py
@@ -0,0 +1,24 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+
+import warnings
+from functools import wraps
+
+
+def deprecated(message):
+ def deprecated_decorator(func):
+ @wraps(func)
+ def deprecated_func(*args, **kwargs):
+ warnings.warn(
+ "'{0}' is a deprecated function. {1}".format(func.__name__, message),
+ category=DeprecationWarning,
+ stacklevel=2,
+ )
+ warnings.simplefilter('default', DeprecationWarning)
+
+ return func(*args, **kwargs)
+
+ return deprecated_func
+
+ return deprecated_decorator
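+
+
+# Illustrative usage (`old_func` and the message are hypothetical):
+#
+#   @deprecated("Use `new_func` instead.")
+#   def old_func():
+#       ...
+#
+# Calling old_func() then emits a DeprecationWarning pointing at the caller
+# (stacklevel=2).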
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/util/format.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/format.py
new file mode 100644
index 0000000..f6b1e96
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/format.py
@@ -0,0 +1,42 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import calendar
+import datetime
+import json
+import re
+
+from datadog.util.compat import conditional_lru_cache
+
+TAG_INVALID_CHARS_RE = re.compile(r"[^\w\d_\-:/\.]", re.UNICODE)
+TAG_INVALID_CHARS_SUBS = "_"
+
+
+def pretty_json(obj):
+ return json.dumps(obj, sort_keys=True, indent=2)
+
+
+def construct_url(host, api_version, path):
+ return "{}/api/{}/{}".format(host.strip("/"), api_version.strip("/"), path.strip("/"))
+
+
+def construct_path(api_version, path):
+ return "{}/{}".format(api_version.strip("/"), path.strip("/"))
+
+
+def force_to_epoch_seconds(epoch_sec_or_dt):
+ if isinstance(epoch_sec_or_dt, datetime.datetime):
+ return calendar.timegm(epoch_sec_or_dt.timetuple())
+ return epoch_sec_or_dt
+
+
+@conditional_lru_cache
+def _normalize_tags_with_cache(tag_list):
+ return [TAG_INVALID_CHARS_RE.sub(TAG_INVALID_CHARS_SUBS, tag) for tag in tag_list]
+
+
+def normalize_tags(tag_list):
+ # We have to turn our input tag list into a non-mutable tuple for it to
+ # be hashable (and thus usable) by the @lru_cache decorator.
+ return _normalize_tags_with_cache(tuple(tag_list))
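+
+# Example (illustrative): characters outside the allowed set are replaced
+# with "_", so normalize_tags(["env:prod", "bad tag!"]) returns
+# ["env:prod", "bad_tag_"].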
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/util/hostname.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/hostname.py
new file mode 100644
index 0000000..6a1f857
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/util/hostname.py
@@ -0,0 +1,305 @@
+# Unless explicitly stated otherwise all files in this repository are licensed under the BSD-3-Clause License.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2015-Present Datadog, Inc
+# stdlib
+import json
+import logging
+import re
+import socket
+import subprocess
+import types
+from typing import Dict, Optional
+
+# datadog
+from datadog.util.compat import url_lib, is_p3k, iteritems
+from datadog.util.config import get_config, get_os, CfgNotFound
+
+VALID_HOSTNAME_RFC_1123_PATTERN = re.compile(
+ r"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$"
+) # noqa
+MAX_HOSTNAME_LEN = 255
+
+log = logging.getLogger("datadog.api")
+
+
+def is_valid_hostname(hostname):
+ if hostname.lower() in set(
+ [
+ "localhost",
+ "localhost.localdomain",
+ "localhost6.localdomain6",
+ "ip6-localhost",
+ ]
+ ):
+ log.warning("Hostname: %s is local" % hostname)
+ return False
+ if len(hostname) > MAX_HOSTNAME_LEN:
+ log.warning("Hostname: %s is too long (max length is %s characters)" % (hostname, MAX_HOSTNAME_LEN))
+ return False
+ if VALID_HOSTNAME_RFC_1123_PATTERN.match(hostname) is None:
+ log.warning("Hostname: %s is not complying with RFC 1123" % hostname)
+ return False
+ return True
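+
+# Examples (illustrative):
+#   is_valid_hostname("ip-10-0-0-1")  -> True
+#   is_valid_hostname("localhost")    -> False (local names are rejected)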
+
+
+def get_hostname(hostname_from_config):
+ # type: (bool) -> Optional[str]
+ """
+ Get the canonical host name this agent should identify as. This is
+ the authoritative source of the host name for the agent.
+
+ Tries, in order:
+
+ * agent config (datadog.conf, "hostname:")
+ * 'hostname -f' (on unix)
+ * socket.gethostname()
+ """
+
+ hostname = None
+ config = None
+
+ # first, try the config if hostname_from_config is set to True
+ try:
+ if hostname_from_config:
+ config = get_config()
+ config_hostname = config.get("hostname")
+ if config_hostname and is_valid_hostname(config_hostname):
+ log.warning(
+ "Hostname lookup from agent configuration will be deprecated "
+ "in an upcoming version of datadogpy. Set hostname_from_config to False "
+ "to get rid of this warning"
+ )
+ return config_hostname
+ except CfgNotFound:
+ log.info("No agent or invalid configuration file found")
+
+ # Try to get GCE instance name
+ if hostname is None:
+ gce_hostname = GCE.get_hostname(config)
+ if gce_hostname is not None:
+ if is_valid_hostname(gce_hostname):
+ return gce_hostname
+ # then move on to os-specific detection
+ if hostname is None:
+
+ def _get_hostname_unix():
+ try:
+ # try fqdn
+ p = subprocess.Popen(["/bin/hostname", "-f"], stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
+ out, err = p.communicate()
+ if p.returncode == 0:
+ if is_p3k():
+ return out.decode("utf-8").strip()
+ else:
+ return out.strip()
+ except Exception:
+ return None
+
+ os_name = get_os()
+ if os_name in ["mac", "freebsd", "linux", "solaris"]:
+ unix_hostname = _get_hostname_unix()
+ if unix_hostname and is_valid_hostname(unix_hostname):
+ hostname = unix_hostname
+
+ # if we have an ec2 default hostname, see if there's an instance-id available
+    if hostname is not None and any(hostname.lower().startswith(p) for p in (u"ip-", u"domu")):
+ instanceid = EC2.get_instance_id(config)
+ if instanceid:
+ hostname = instanceid
+
+ # fall back on socket.gethostname(), socket.getfqdn() is too unreliable
+ if hostname is None:
+ try:
+ socket_hostname = socket.gethostname() # type: Optional[str]
+ except socket.error:
+ socket_hostname = None
+ if socket_hostname and is_valid_hostname(socket_hostname):
+ hostname = socket_hostname
+
+ if hostname is None:
+ log.warning(
+ u"Unable to reliably determine host name. You can define one in your `hosts` file, "
+ u"or in `datadog.conf` file if you have Datadog Agent installed."
+ )
+
+ return hostname
+
+
+def get_ec2_instance_id():
+ try:
+ # Remember the previous default timeout
+ old_timeout = socket.getdefaulttimeout()
+
+ # Try to query the EC2 internal metadata service, but fail fast
+ socket.setdefaulttimeout(0.25)
+
+ try:
+ return url_lib.urlopen(url_lib.Request("http://169.254.169.254/latest/" "meta-data/instance-id")).read()
+ finally:
+ # Reset the previous default timeout
+ socket.setdefaulttimeout(old_timeout)
+ except Exception:
+ return socket.gethostname()
+
+
+class GCE(object):
+ URL = "http://169.254.169.254/computeMetadata/v1/?recursive=true"
+ TIMEOUT = 0.1 # second
+ SOURCE_TYPE_NAME = "google cloud platform"
+ metadata = None
+
+ @staticmethod
+ def _get_metadata(agentConfig):
+ if GCE.metadata is not None:
+ return GCE.metadata
+
+ if not agentConfig["collect_instance_metadata"]:
+ log.info("Instance metadata collection is disabled. Not collecting it.")
+ GCE.metadata = {}
+ return GCE.metadata
+
+ socket_to = None
+ try:
+ socket_to = socket.getdefaulttimeout()
+ socket.setdefaulttimeout(GCE.TIMEOUT)
+ except Exception:
+ pass
+
+ try:
+ opener = url_lib.build_opener()
+ opener.addheaders = [("X-Google-Metadata-Request", "True")]
+ GCE.metadata = json.loads(opener.open(GCE.URL).read().strip())
+
+ except Exception:
+ GCE.metadata = {}
+
+ try:
+ if socket_to is None:
+ socket_to = 3
+ socket.setdefaulttimeout(socket_to)
+ except Exception:
+ pass
+ return GCE.metadata
+
+ @staticmethod
+ def get_hostname(agentConfig):
+ try:
+ host_metadata = GCE._get_metadata(agentConfig)
+ return host_metadata["instance"]["hostname"].split(".")[0]
+ except Exception:
+ return None
+
+
+class EC2(object):
+ """Retrieve EC2 metadata"""
+
+ URL = "http://169.254.169.254/latest/meta-data"
+ TIMEOUT = 0.1 # second
+ metadata = {} # type: Dict[str, str]
+
+ @staticmethod
+ def get_tags(agentConfig):
+ if not agentConfig["collect_instance_metadata"]:
+ log.info("Instance metadata collection is disabled. Not collecting it.")
+ return []
+
+ socket_to = None
+ try:
+ socket_to = socket.getdefaulttimeout()
+ socket.setdefaulttimeout(EC2.TIMEOUT)
+ except Exception:
+ pass
+
+ try:
+ iam_role = url_lib.urlopen(EC2.URL + "/iam/security-credentials").read().strip()
+ iam_params = json.loads(
+ url_lib.urlopen(EC2.URL + "/iam/security-credentials" + "/" + str(iam_role)).read().strip()
+ )
+ from boto.ec2.connection import EC2Connection
+
+ connection = EC2Connection(
+ aws_access_key_id=iam_params["AccessKeyId"],
+ aws_secret_access_key=iam_params["SecretAccessKey"],
+ security_token=iam_params["Token"],
+ )
+ instance_object = connection.get_only_instances([EC2.metadata["instance-id"]])[0]
+
+ EC2_tags = [u"%s:%s" % (tag_key, tag_value) for tag_key, tag_value in iteritems(instance_object.tags)]
+
+ except Exception:
+ log.exception("Problem retrieving custom EC2 tags")
+ EC2_tags = []
+
+ try:
+ if socket_to is None:
+ socket_to = 3
+ socket.setdefaulttimeout(socket_to)
+ except Exception:
+ pass
+
+ return EC2_tags
+
+ @staticmethod
+ def get_metadata(agentConfig):
+ """Use the ec2 http service to introspect the instance. This adds latency \
+ if not running on EC2
+ """
+ # >>> import urllib2
+ # >>> urllib2.urlopen('http://169.254.169.254/latest/', timeout=1).read()
+ # 'meta-data\nuser-data'
+ # >>> urllib2.urlopen('http://169.254.169.254/latest/meta-data', timeout=1).read()
+ # 'ami-id\nami-launch-index\nami-manifest-path\nhostname\ninstance-id\nlocal-ipv4\
+ # npublic-keys/\nreservation-id\nsecurity-groups'
+ # >>> urllib2.urlopen('http://169.254.169.254/latest/meta-data/instance-id',
+ # timeout=1).read()
+ # 'i-deadbeef'
+
+ # Every call may add TIMEOUT seconds in latency so don't abuse this call
+ # python 2.4 does not support an explicit timeout argument so force it here
+ # Rather than monkey-patching urllib2, just lower the timeout globally for these calls
+
+ if not agentConfig["collect_instance_metadata"]:
+ log.info("Instance metadata collection is disabled. Not collecting it.")
+ return {}
+
+ socket_to = None
+ try:
+ socket_to = socket.getdefaulttimeout()
+ socket.setdefaulttimeout(EC2.TIMEOUT)
+ except Exception:
+ pass
+
+ for k in (
+ "instance-id",
+ "hostname",
+ "local-hostname",
+ "public-hostname",
+ "ami-id",
+ "local-ipv4",
+ "public-keys",
+ "public-ipv4",
+ "reservation-id",
+ "security-groups",
+ ):
+ try:
+                v = url_lib.urlopen(EC2.URL + "/" + str(k)).read().strip()
+                # `types.StringType`/`types.UnicodeType` only exist on Python 2;
+                # accept str and bytes so the check also works on Python 3.
+                assert isinstance(v, (str, bytes)) and len(v) > 0, "%s is not a string" % v
+ EC2.metadata[k] = v
+ except Exception:
+ pass
+
+ try:
+ if socket_to is None:
+ socket_to = 3
+ socket.setdefaulttimeout(socket_to)
+ except Exception:
+ pass
+
+ return EC2.metadata
+
+ @staticmethod
+ def get_instance_id(agentConfig):
+ try:
+ return EC2.get_metadata(agentConfig).get("instance-id", None)
+ except Exception:
+ return None
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog/version.py b/lambdas/aws-dd-forwarder-3.127.0/datadog/version.py
new file mode 100644
index 0000000..3158ac8
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog/version.py
@@ -0,0 +1 @@
+__version__ = "0.48.0"
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/LICENSE b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/LICENSE
new file mode 100644
index 0000000..8263325
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/LICENSE
@@ -0,0 +1,203 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2018 Datadog, Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+SPDX-License-Identifier: Apache-2.0
\ No newline at end of file
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/METADATA
new file mode 100644
index 0000000..b1eba7e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/METADATA
@@ -0,0 +1,106 @@
+Metadata-Version: 2.1
+Name: datadog-lambda
+Version: 5.87.0
+Summary: The Datadog AWS Lambda Library
+Home-page: https://github.com/DataDog/datadog-lambda-python
+License: Apache-2.0
+Keywords: datadog,aws,lambda,layer
+Author: Datadog, Inc.
+Author-email: dev@datadoghq.com
+Requires-Python: >=3.8.0,<4
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Provides-Extra: dev
+Requires-Dist: boto3 (>=1.28.0,<2.0.0) ; extra == "dev"
+Requires-Dist: datadog (>=0.41.0,<1.0.0)
+Requires-Dist: ddtrace (>=2.3.1)
+Requires-Dist: flake8 (>=5.0.4,<6.0.0) ; extra == "dev"
+Requires-Dist: importlib_metadata ; python_version < "3.8"
+Requires-Dist: nose2 (>=0.9.1,<0.10.0) ; extra == "dev"
+Requires-Dist: requests (>=2.22.0,<3.0.0) ; extra == "dev"
+Requires-Dist: typing_extensions (>=4.0,<5.0) ; python_version < "3.8"
+Requires-Dist: urllib3 (<2.0.0) ; python_version < "3.11"
+Requires-Dist: urllib3 (<2.1.0) ; python_version >= "3.11"
+Requires-Dist: wrapt (>=1.11.2,<2.0.0)
+Project-URL: Repository, https://github.com/DataDog/datadog-lambda-python
+Description-Content-Type: text/markdown
+
+# datadog-lambda-python
+
+![build](https://github.com/DataDog/datadog-lambda-python/workflows/build/badge.svg)
+[![PyPI](https://img.shields.io/pypi/v/datadog-lambda)](https://pypi.org/project/datadog-lambda/)
+![PyPI - Python Version](https://img.shields.io/pypi/pyversions/datadog-lambda)
+[![Slack](https://chat.datadoghq.com/badge.svg?bg=632CA6)](https://chat.datadoghq.com/)
+[![License](https://img.shields.io/badge/license-Apache--2.0-blue)](https://github.com/DataDog/datadog-lambda-python/blob/main/LICENSE)
+
+Datadog Lambda Library for Python (3.8, 3.9, 3.10, 3.11, and 3.12) enables [enhanced Lambda metrics](https://docs.datadoghq.com/serverless/enhanced_lambda_metrics), [distributed tracing](https://docs.datadoghq.com/serverless/distributed_tracing), and [custom metric submission](https://docs.datadoghq.com/serverless/custom_metrics) from AWS Lambda functions.
+
+## Installation
+
+Follow the [installation instructions](https://docs.datadoghq.com/serverless/installation/python/), and view your function's enhanced metrics, traces and logs in Datadog.
+
+## Configuration
+
+Follow the [configuration instructions](https://docs.datadoghq.com/serverless/configuration) to tag your telemetry, capture request/response payloads, filter or scrub sensitive information from logs or traces, and more.
+
+For additional tracing configuration options, check out the [official documentation for Datadog trace client](https://ddtrace.readthedocs.io/en/stable/configuration.html).
+
+Besides the environment variables supported by dd-trace-py, the datadog-lambda-python library adds the following environment variables.
+
+| Environment Variables | Description | Default Value |
+| -------------------- | ------------ | ------------- |
+| DD_ENCODE_AUTHORIZER_CONTEXT | When set to `true` for Lambda authorizers, the tracing context will be encoded into the response for propagation. Supported for NodeJS and Python. | `true` |
+| DD_DECODE_AUTHORIZER_CONTEXT | When set to `true` for Lambdas that are authorized via Lambda authorizers, it will parse and use the encoded tracing context (if found). Supported for NodeJS and Python. | `true` |
+| DD_COLD_START_TRACING | Set to `false` to disable Cold Start Tracing. Used in NodeJS and Python. | `true` |
+| DD_MIN_COLD_START_DURATION | Sets the minimum duration (in milliseconds) for a module load event to be traced via Cold Start Tracing. Number. | `3` |
+| DD_COLD_START_TRACE_SKIP_LIB | Optionally skip creating Cold Start Spans for a comma-separated list of libraries. Useful to limit depth or skip known libraries. | `ddtrace.internal.compat,ddtrace.filters` |
+| DD_CAPTURE_LAMBDA_PAYLOAD | [Captures incoming and outgoing AWS Lambda payloads][1] in the Datadog APM spans for Lambda invocations. | `false` |
+| DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH | Determines the level of detail captured from AWS Lambda payloads, which are then assigned as tags for the `aws.lambda` span. It specifies the nesting depth of the JSON payload structure to process. Once the specified maximum depth is reached, the tag's value is set to the stringified value of any nested elements beyond this level. For example, given the input payload `{"lv1": {"lv2": {"lv3": "val"}}}`: if the depth is set to `2`, the resulting tag's key is `function.request.lv1.lv2` and the value is `{\"lv3\": \"val\"}`; if the depth is set to `0`, the resulting tag's key is `function.request` and the value is `{\"lv1\":{\"lv2\":{\"lv3\": \"val\"}}}`. | `10` |
+
+
+## Opening Issues
+
+If you encounter a bug with this package, we want to hear about it. Before opening a new issue, search the existing issues to avoid duplicates.
+
+When opening an issue, include the Datadog Lambda Library version, Python version, and stack trace if available. In addition, include the steps to reproduce when appropriate.
+
+You can also open an issue for a feature request.
+
+## Lambda Profiling Beta
+
+Datadog's [Continuous Profiler](https://www.datadoghq.com/product/code-profiling/) is now available in beta for Python in version 4.62.0 and layer version 62 and above. This optional feature is enabled by setting the `DD_PROFILING_ENABLED` environment variable to `true`. During the beta period, profiling is available at no additional cost.
+
+The Continuous Profiler works by spawning a thread which periodically wakes up and takes a snapshot of the CPU and heap usage of all running Python code. This can include the profiler itself. If you want the profiler to ignore itself, set `DD_PROFILING_IGNORE_PROFILER` to `true`.
+
+## Major Version Notes
+
+### 5.x / Layer version 86+
+- Python 3.7 support has been [deprecated](https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html) by AWS and removed from this library.
+
+### 4.x / Layer version 61+
+
+- Python 3.6 support has been [deprecated](https://docs.aws.amazon.com/lambda/latest/dg/lambda-runtimes.html) by AWS and removed from this library.
+- `dd-trace` was upgraded from 0.61 to 1.4; full release notes are available [here](https://ddtrace.readthedocs.io/en/stable/release_notes.html#v1-0-0)
+ - `get_correlation_ids()` has been changed to `get_log_correlation_context()`, which now returns a dictionary containing the active `span_id`, `trace_id`, as well as `service` and `env`.
+
+## Contributing
+
+If you find an issue with this package and have a fix, please feel free to open a pull request following the [procedures](CONTRIBUTING.md).
+
+## Community
+
+For product feedback and questions, join the `#serverless` channel in the [Datadog community on Slack](https://chat.datadoghq.com/).
+
+## License
+
+Unless explicitly stated otherwise all files in this repository are licensed under the Apache License Version 2.0.
+
+This product includes software developed at Datadog (https://www.datadoghq.com/). Copyright 2019 Datadog, Inc.
+
+[1]: https://www.datadoghq.com/blog/troubleshoot-lambda-function-request-response-payloads/
+
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/RECORD
new file mode 100644
index 0000000..a275ee4
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/RECORD
@@ -0,0 +1,44 @@
+datadog_lambda-5.87.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+datadog_lambda-5.87.0.dist-info/LICENSE,sha256=4yQmjpKp1MKL7DdRDPVHkKYc2W0aezm5SIDske8oAdM,11379
+datadog_lambda-5.87.0.dist-info/METADATA,sha256=s2sPTache99ImWP7igPhfpBOuNtxNUceNYeioFEi06w,7326
+datadog_lambda-5.87.0.dist-info/RECORD,,
+datadog_lambda-5.87.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+datadog_lambda-5.87.0.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
+datadog_lambda/__init__.py,sha256=fE7XRhgzSgTCjt1AAocMbqtAJM9pZiLbbcCi8BWE3fQ,538
+datadog_lambda/__pycache__/__init__.cpython-311.pyc,,
+datadog_lambda/__pycache__/api.cpython-311.pyc,,
+datadog_lambda/__pycache__/cold_start.cpython-311.pyc,,
+datadog_lambda/__pycache__/constants.cpython-311.pyc,,
+datadog_lambda/__pycache__/dogstatsd.cpython-311.pyc,,
+datadog_lambda/__pycache__/extension.cpython-311.pyc,,
+datadog_lambda/__pycache__/handler.cpython-311.pyc,,
+datadog_lambda/__pycache__/metric.cpython-311.pyc,,
+datadog_lambda/__pycache__/module_name.cpython-311.pyc,,
+datadog_lambda/__pycache__/patch.cpython-311.pyc,,
+datadog_lambda/__pycache__/stats_writer.cpython-311.pyc,,
+datadog_lambda/__pycache__/statsd_writer.cpython-311.pyc,,
+datadog_lambda/__pycache__/tag_object.cpython-311.pyc,,
+datadog_lambda/__pycache__/tags.cpython-311.pyc,,
+datadog_lambda/__pycache__/thread_stats_writer.cpython-311.pyc,,
+datadog_lambda/__pycache__/tracing.cpython-311.pyc,,
+datadog_lambda/__pycache__/trigger.cpython-311.pyc,,
+datadog_lambda/__pycache__/wrapper.cpython-311.pyc,,
+datadog_lambda/__pycache__/xray.cpython-311.pyc,,
+datadog_lambda/api.py,sha256=TFg7gCek088_C53cZQQHDoLXGlTAhP2AD8NAuWYOVco,3653
+datadog_lambda/cold_start.py,sha256=aGpWlgPdMvQkyK9kVz5pEoLIxrVa0AqZoOy5ABXyXzA,7891
+datadog_lambda/constants.py,sha256=DeujbnguBT9nDioiaYlgQQdZ6Ps53sWXmYhruLVoCHE,1669
+datadog_lambda/dogstatsd.py,sha256=HCyl72oQUSF3E4y1ivrHaGTHL9WG1asGjB1Xo2D_Abc,4769
+datadog_lambda/extension.py,sha256=zQaBioG0TrWtZvk8c9z7ANUJt_oMzeAPMG-mGPL_cMw,1199
+datadog_lambda/handler.py,sha256=r2MiZoIfTWuVAN-f6iXXIjhdtd1t7m9bTnwplVm2SEY,994
+datadog_lambda/metric.py,sha256=jk4jRgb0pwxd_c4D2zzAZ3olN_8ci64fpYk3cuxbg0U,4707
+datadog_lambda/module_name.py,sha256=5FmOCjjgjq78b6a83QePZZFmqahAoy9XHdUNWdq2D1Q,139
+datadog_lambda/patch.py,sha256=Hr_zeekk9PeAizTDFoZ_ZwTWptjgtKjl9A-XHX5kA1k,4641
+datadog_lambda/stats_writer.py,sha256=SIac96wu45AxDOZ4GraCbK3r1RKr4AFgXcEPHg1VX0A,243
+datadog_lambda/statsd_writer.py,sha256=F4SCJ6-J6YfvQNh0uQfAkP6QYiAtV3-MCsxz4QnaBBI,403
+datadog_lambda/tag_object.py,sha256=Kcys4Mo4_4vdXxq4XS7ilWpCuSQQyVRSjDejgq6RJS4,2112
+datadog_lambda/tags.py,sha256=wIG1f5iq85dq3FNV-yi-D0XwqYOx8jE0x_8Re6Ucmso,3240
+datadog_lambda/thread_stats_writer.py,sha256=fkjMDgrzwACrK_ZrCwl9mHz5U3CMLEyrsaondjdM3r8,2522
+datadog_lambda/tracing.py,sha256=r9H77-RNsmXxHA3k8yaOzmShYD4FtYtz3yrbDin36cQ,46854
+datadog_lambda/trigger.py,sha256=_Sxpy9UpMDHdw_X1hD61G4OTex7CIYQw1guFu6dzByo,12082
+datadog_lambda/wrapper.py,sha256=NrM6_TCWi4sjIHSSGAjAZV7hdF8PxJwCurRQUCnjspo,15547
+datadog_lambda/xray.py,sha256=05-8xd3GOOIDtGaB4k2Ow1kbWn86Px2mhyKEUYIwKIc,3448
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/WHEEL
new file mode 100644
index 0000000..258a6ff
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda-5.87.0.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: poetry-core 1.6.1
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/__init__.py
new file mode 100644
index 0000000..20b4244
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/__init__.py
@@ -0,0 +1,17 @@
+import os
+import logging
+from datadog_lambda.cold_start import initialize_cold_start_tracing
+
+initialize_cold_start_tracing()
+
+# The minor version corresponds to the Lambda layer version.
+# E.g., version 0.5.0 gets packaged into layer version 5.
+try:
+ import importlib.metadata as importlib_metadata
+except ModuleNotFoundError:
+ import importlib_metadata
+
+__version__ = importlib_metadata.version(__name__)
+
+logger = logging.getLogger(__name__)
+logger.setLevel(logging.getLevelName(os.environ.get("DD_LOG_LEVEL", "INFO").upper()))
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/api.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/api.py
new file mode 100644
index 0000000..079f69d
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/api.py
@@ -0,0 +1,93 @@
+import os
+import logging
+import base64
+from datadog_lambda.extension import should_use_extension
+
+logger = logging.getLogger(__name__)
+KMS_ENCRYPTION_CONTEXT_KEY = "LambdaFunctionName"
+
+
+def decrypt_kms_api_key(kms_client, ciphertext):
+    """
+    Decodes and deciphers the base64-encoded ciphertext given as a parameter using KMS.
+    For this to work properly, the Lambda function must have the appropriate IAM permissions.
+
+    Args:
+        kms_client: The KMS client to use for decryption
+        ciphertext (string): The base64-encoded ciphertext to decrypt
+    """
+    from botocore.exceptions import ClientError
+
+ decoded_bytes = base64.b64decode(ciphertext)
+
+ """
+ When the API key is encrypted using the AWS console, the function name is added as an
+ encryption context. When the API key is encrypted using the AWS CLI, no encryption context
+ is added. We need to try decrypting the API key both with and without the encryption context.
+ """
+ # Try without encryption context, in case API key was encrypted using the AWS CLI
+ function_name = os.environ.get("AWS_LAMBDA_FUNCTION_NAME")
+ try:
+ plaintext = kms_client.decrypt(CiphertextBlob=decoded_bytes)[
+ "Plaintext"
+ ].decode("utf-8")
+ except ClientError:
+ logger.debug(
+ "Failed to decrypt ciphertext without encryption context, \
+ retrying with encryption context"
+ )
+ # Try with encryption context, in case API key was encrypted using the AWS Console
+ plaintext = kms_client.decrypt(
+ CiphertextBlob=decoded_bytes,
+ EncryptionContext={
+ KMS_ENCRYPTION_CONTEXT_KEY: function_name,
+ },
+ )["Plaintext"].decode("utf-8")
+
+ return plaintext
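+
+# Illustrative call site (mirrors what init_api() below does; the boto3
+# client is assumed):
+#   kms_client = boto3.client("kms")
+#   api_key = decrypt_kms_api_key(kms_client, os.environ["DD_KMS_API_KEY"])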
+
+
+def init_api():
+ if (
+ not should_use_extension
+ and not os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true"
+ ):
+        # Make sure that this package is always lazy-loaded outside of the
+        # critical path, since the underlying packages are quite heavy to load
+        # and useless when the extension is present.
+ from datadog import api
+
+ if not api._api_key:
+ import boto3
+
+ DD_API_KEY_SECRET_ARN = os.environ.get("DD_API_KEY_SECRET_ARN", "")
+ DD_API_KEY_SSM_NAME = os.environ.get("DD_API_KEY_SSM_NAME", "")
+ DD_KMS_API_KEY = os.environ.get("DD_KMS_API_KEY", "")
+ DD_API_KEY = os.environ.get(
+ "DD_API_KEY", os.environ.get("DATADOG_API_KEY", "")
+ )
+
+ if DD_API_KEY_SECRET_ARN:
+ api._api_key = boto3.client("secretsmanager").get_secret_value(
+ SecretId=DD_API_KEY_SECRET_ARN
+ )["SecretString"]
+ elif DD_API_KEY_SSM_NAME:
+ api._api_key = boto3.client("ssm").get_parameter(
+ Name=DD_API_KEY_SSM_NAME, WithDecryption=True
+ )["Parameter"]["Value"]
+ elif DD_KMS_API_KEY:
+ kms_client = boto3.client("kms")
+ api._api_key = decrypt_kms_api_key(kms_client, DD_KMS_API_KEY)
+ else:
+ api._api_key = DD_API_KEY
+
+ logger.debug("Setting DATADOG_API_KEY of length %d", len(api._api_key))
+
+ # Set DATADOG_HOST, to send data to a non-default Datadog datacenter
+ api._api_host = os.environ.get(
+ "DATADOG_HOST", "https://api." + os.environ.get("DD_SITE", "datadoghq.com")
+ )
+ logger.debug("Setting DATADOG_HOST to %s", api._api_host)
+
+ # Unmute exceptions from datadog api client, so we can catch and handle them
+ api._mute = False
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/cold_start.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/cold_start.py
new file mode 100644
index 0000000..9da02e7
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/cold_start.py
@@ -0,0 +1,252 @@
+import time
+import os
+from typing import List, Hashable
+import logging
+
+logger = logging.getLogger(__name__)
+
+_cold_start = True
+_proactive_initialization = False
+_lambda_container_initialized = False
+
+
+def set_cold_start(init_timestamp_ns):
+ """Set the value of the cold start global
+
+ This should be executed once per Lambda execution before the execution
+ """
+ global _cold_start
+ global _lambda_container_initialized
+ global _proactive_initialization
+ if not _lambda_container_initialized:
+ now = time.time_ns()
+ if (now - init_timestamp_ns) // 1_000_000_000 > 10:
+ _cold_start = False
+ _proactive_initialization = True
+ else:
+ _cold_start = not _lambda_container_initialized
+ else:
+ _cold_start = False
+ _proactive_initialization = False
+ _lambda_container_initialized = True
+
+
+def is_cold_start():
+ """Returns the value of the global cold_start"""
+ return _cold_start
+
+
+def is_proactive_init():
+ """Returns the value of the global proactive_initialization"""
+ return _proactive_initialization
+
+
+def is_new_sandbox():
+ return is_cold_start() or is_proactive_init()
+
+
+def get_cold_start_tag():
+ """Returns the cold start tag to be used in metrics"""
+ return "cold_start:{}".format(str(is_cold_start()).lower())
+
+
+def get_proactive_init_tag():
+ """Returns the proactive init tag to be used in metrics"""
+ return "proactive_initialization:{}".format(str(is_proactive_init()).lower())
+
+
+class ImportNode(object):
+ def __init__(self, module_name, full_file_path, start_time_ns, end_time_ns=None):
+ self.module_name = module_name
+ self.full_file_path = full_file_path
+ self.start_time_ns = start_time_ns
+ self.end_time_ns = end_time_ns
+ self.children = []
+
+
+root_nodes: List[ImportNode] = []
+import_stack: List[ImportNode] = []
+already_wrapped_loaders = set()
+
+
+def reset_node_stacks():
+ global root_nodes
+ root_nodes = []
+ global import_stack
+ import_stack = []
+
+
+def push_node(module_name, file_path):
+ node = ImportNode(module_name, file_path, time.time_ns())
+ global import_stack
+ if import_stack:
+ import_stack[-1].children.append(node)
+ import_stack.append(node)
+
+
+def pop_node(module_name):
+ global import_stack
+ if not import_stack:
+ return
+ node = import_stack.pop()
+ if node.module_name != module_name:
+ return
+ end_time_ns = time.time_ns()
+ node.end_time_ns = end_time_ns
+ if not import_stack: # import_stack empty, a root node has been found
+ global root_nodes
+ root_nodes.append(node)
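+
+# Illustrative shape of the resulting tree: importing module "a", which itself
+# imports "b", yields one root ImportNode for "a" with a single child node for
+# "b"; timings are taken via time.time_ns() at push/pop.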
+
+
+def wrap_exec_module(original_exec_module):
+ def wrapped_method(module):
+ should_pop = False
+ try:
+ spec = module.__spec__
+ push_node(spec.name, spec.origin)
+ should_pop = True
+ except Exception:
+ pass
+ try:
+ return original_exec_module(module)
+ finally:
+ if should_pop:
+ pop_node(spec.name)
+
+ return wrapped_method
+
+
+def wrap_find_spec(original_find_spec):
+ def wrapped_find_spec(*args, **kwargs):
+ spec = original_find_spec(*args, **kwargs)
+ if spec is None:
+ return None
+ loader = getattr(spec, "loader", None)
+ if (
+ loader is not None
+ and isinstance(loader, Hashable)
+ and loader not in already_wrapped_loaders
+ ):
+ if hasattr(loader, "exec_module"):
+ try:
+ loader.exec_module = wrap_exec_module(loader.exec_module)
+ already_wrapped_loaders.add(loader)
+ except Exception as e:
+ logger.debug("Failed to wrap the loader. %s", e)
+ return spec
+
+ return wrapped_find_spec
+
+
+def initialize_cold_start_tracing():
+ if (
+ is_new_sandbox()
+ and os.environ.get("DD_TRACE_ENABLED", "true").lower() == "true"
+ and os.environ.get("DD_COLD_START_TRACING", "true").lower() == "true"
+ ):
+ from sys import meta_path
+
+ for importer in meta_path:
+ try:
+ importer.find_spec = wrap_find_spec(importer.find_spec)
+ except Exception:
+ pass
+
+
+class ColdStartTracer(object):
+ def __init__(
+ self,
+ tracer,
+ function_name,
+ current_span_start_time_ns,
+ trace_ctx,
+ min_duration_ms: int,
+ ignored_libs: List[str] = None,
+ ):
+ if ignored_libs is None:
+ ignored_libs = []
+ self._tracer = tracer
+ self.function_name = function_name
+ self.current_span_start_time_ns = current_span_start_time_ns
+ self.min_duration_ms = min_duration_ms
+ self.trace_ctx = trace_ctx
+ self.ignored_libs = ignored_libs
+ self.need_to_reactivate_context = True
+
+ def trace(self, root_nodes: List[ImportNode] = root_nodes):
+ if not root_nodes:
+ return
+ cold_start_span_start_time_ns = root_nodes[0].start_time_ns
+ cold_start_span_end_time_ns = min(
+ root_nodes[-1].end_time_ns, self.current_span_start_time_ns
+ )
+ cold_start_span = self.create_cold_start_span(cold_start_span_start_time_ns)
+ while root_nodes:
+ root_node = root_nodes.pop()
+ self.trace_tree(root_node, cold_start_span)
+ self.finish_span(cold_start_span, cold_start_span_end_time_ns)
+
+ def trace_tree(self, import_node: ImportNode, parent_span):
+ if (
+ import_node.end_time_ns - import_node.start_time_ns
+ < self.min_duration_ms * 1e6
+ or import_node.module_name in self.ignored_libs
+ ):
+ return
+
+ span = self.start_span(
+ "aws.lambda.import", import_node.module_name, import_node.start_time_ns
+ )
+ tags = {
+ "resource_names": import_node.module_name,
+ "resource.name": import_node.module_name,
+ "filename": import_node.full_file_path,
+ "operation_name": self.get_operation_name(import_node.full_file_path),
+ }
+ span.set_tags(tags)
+ if parent_span:
+ span.parent_id = parent_span.span_id
+ for child_node in import_node.children:
+ self.trace_tree(child_node, span)
+ self.finish_span(span, import_node.end_time_ns)
+
+ def create_cold_start_span(self, start_time_ns):
+ span = self.start_span("aws.lambda.load", self.function_name, start_time_ns)
+ tags = {
+ "resource_names": self.function_name,
+ "resource.name": self.function_name,
+ "operation_name": "aws.lambda.load",
+ }
+ span.set_tags(tags)
+ return span
+
+ def start_span(self, span_type, resource, start_time_ns):
+ if self.need_to_reactivate_context:
+ self._tracer.context_provider.activate(
+ self.trace_ctx
+ ) # reactivate required after each finish() call
+ self.need_to_reactivate_context = False
+ span_kwargs = {
+ "service": "aws.lambda",
+ "resource": resource,
+ "span_type": span_type,
+ }
+ span = self._tracer.trace(span_type, **span_kwargs)
+ span.start_ns = start_time_ns
+ return span
+
+ def finish_span(self, span, finish_time_ns):
+ span.finish(finish_time_ns / 1e9)
+ self.need_to_reactivate_context = True
+
+ def get_operation_name(self, filename: str):
+ if filename is None:
+ return "aws.lambda.import_core_module"
+ if not isinstance(filename, str):
+ return "aws.lambda.import"
+ if filename.startswith("/opt/"):
+ return "aws.lambda.import_layer"
+ elif filename.startswith("/var/lang/"):
+ return "aws.lambda.import_runtime"
+ else:
+ return "aws.lambda.import"
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/constants.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/constants.py
new file mode 100644
index 0000000..fd8afb3
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/constants.py
@@ -0,0 +1,53 @@
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License Version 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2019 Datadog, Inc.
+
+# Datadog trace sampling priority
+
+
+class SamplingPriority(object):
+ USER_REJECT = -1
+ AUTO_REJECT = 0
+ AUTO_KEEP = 1
+ USER_KEEP = 2
+
+
+# Datadog trace headers
+class TraceHeader(object):
+ TRACE_ID = "x-datadog-trace-id"
+ PARENT_ID = "x-datadog-parent-id"
+ SAMPLING_PRIORITY = "x-datadog-sampling-priority"
+
+
+# X-Ray subsegment to save Datadog trace metadata
+class XraySubsegment(object):
+ NAME = "datadog-metadata"
+ TRACE_KEY = "trace"
+ LAMBDA_FUNCTION_TAGS_KEY = "lambda_function_tags"
+ NAMESPACE = "datadog"
+
+
+# TraceContextSource of datadog context. The DD_MERGE_XRAY_TRACES
+# feature uses this to determine when to use X-Ray as the parent
+# trace.
+class TraceContextSource(object):
+ XRAY = "xray"
+ EVENT = "event"
+ DDTRACE = "ddtrace"
+
+
+# X-Ray daemon
+class XrayDaemon(object):
+ XRAY_TRACE_ID_HEADER_NAME = "_X_AMZN_TRACE_ID"
+ XRAY_DAEMON_ADDRESS = "AWS_XRAY_DAEMON_ADDRESS"
+ FUNCTION_NAME_HEADER_NAME = "AWS_LAMBDA_FUNCTION_NAME"
+
+
+class Headers(object):
+ Parent_Span_Finish_Time = "x-datadog-parent-span-finish-time"
+
+ # For one request from the client, the event.requestContext.requestIds in the authorizer lambda
+ # invocation and the main function invocation are IDENTICAL. Therefore we can use it to tell
+    # whether the current invocation is the actual original authorizing request or a cached request.
+ Authorizing_Request_Id = "x-datadog-authorizing-requestid"
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/dogstatsd.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/dogstatsd.py
new file mode 100644
index 0000000..a627492
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/dogstatsd.py
@@ -0,0 +1,143 @@
+import logging
+import os
+import socket
+import errno
+import re
+from threading import Lock
+
+
+MIN_SEND_BUFFER_SIZE = 32 * 1024
+log = logging.getLogger("datadog_lambda.dogstatsd")
+
+
+class DogStatsd(object):
+ def __init__(self):
+ self._socket_lock = Lock()
+ self.socket_path = None
+ self.host = "localhost"
+ self.port = 8125
+ self.socket = None
+ self.encoding = "utf-8"
+
+ def get_socket(self, telemetry=False):
+ """
+ Return a connected socket.
+
+ Note: connect the socket before assigning it to the class instance to
+ avoid bad thread race conditions.
+ """
+ with self._socket_lock:
+ self.socket = self._get_udp_socket(
+ self.host,
+ self.port,
+ )
+ return self.socket
+
+ @classmethod
+ def _ensure_min_send_buffer_size(cls, sock, min_size=MIN_SEND_BUFFER_SIZE):
+        # Increase the send buffer size where needed (e.g. macOS has 4k send
+        # buffers, which is half of the max packet size that the client will send).
+ if os.name == "posix":
+ try:
+ recv_buff_size = sock.getsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF)
+ if recv_buff_size <= min_size:
+ sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, min_size)
+ log.debug("Socket send buffer increased to %dkb", min_size / 1024)
+ finally:
+ pass
+
+ @classmethod
+ def _get_udp_socket(cls, host, port):
+ sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ sock.setblocking(0)
+ cls._ensure_min_send_buffer_size(sock)
+ sock.connect((host, port))
+
+ return sock
+
+ def distribution(self, metric, value, tags=None):
+ """
+ Send a global distribution value, optionally setting tags.
+
+ >>> statsd.distribution("uploaded.file.size", 1445)
+ >>> statsd.distribution("album.photo.count", 26, tags=["gender:female"])
+ """
+ self._report(metric, "d", value, tags)
+
+ def close_socket(self):
+ """
+ Closes connected socket if connected.
+ """
+ with self._socket_lock:
+ if self.socket:
+ try:
+ self.socket.close()
+ except OSError as e:
+ log.error("Unexpected error: %s", str(e))
+ self.socket = None
+
+ def normalize_tags(self, tag_list):
+ TAG_INVALID_CHARS_RE = re.compile(r"[^\w\d_\-:/\.]", re.UNICODE)
+ TAG_INVALID_CHARS_SUBS = "_"
+ return [
+ re.sub(TAG_INVALID_CHARS_RE, TAG_INVALID_CHARS_SUBS, tag)
+ for tag in tag_list
+ ]
+
+ def _serialize_metric(self, metric, metric_type, value, tags):
+ # Create/format the metric packet
+ return "%s:%s|%s%s" % (
+ metric,
+ value,
+ metric_type,
+ ("|#" + ",".join(self.normalize_tags(tags))) if tags else "",
+ )
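+
+    # Example (illustrative):
+    #   _serialize_metric("uploaded.file.size", "d", 1445, ["env:prod"])
+    #   returns "uploaded.file.size:1445|d|#env:prod"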
+
+ def _report(self, metric, metric_type, value, tags):
+ if value is None:
+ return
+
+ payload = self._serialize_metric(metric, metric_type, value, tags)
+
+ # Send it
+ self._send_to_server(payload)
+
+ def _send_to_server(self, packet):
+ try:
+ mysocket = self.socket or self.get_socket()
+ mysocket.send(packet.encode(self.encoding))
+ return True
+ except socket.timeout:
+            # dogstatsd is overflowing, drop the packets (mimics the UDP behaviour)
+ pass
+ except (socket.herror, socket.gaierror) as socket_err:
+ log.warning(
+ "Error submitting packet: %s, dropping the packet and closing the socket",
+ socket_err,
+ )
+ self.close_socket()
+ except socket.error as socket_err:
+ if socket_err.errno == errno.EAGAIN:
+ log.debug(
+ "Socket send would block: %s, dropping the packet", socket_err
+ )
+ elif socket_err.errno == errno.ENOBUFS:
+ log.debug("Socket buffer full: %s, dropping the packet", socket_err)
+ elif socket_err.errno == errno.EMSGSIZE:
+ log.debug(
+ "Packet size too big (size: %d): %s, dropping the packet",
+ len(packet.encode(self.encoding)),
+ socket_err,
+ )
+ else:
+ log.warning(
+ "Error submitting packet: %s, dropping the packet and closing the socket",
+ socket_err,
+ )
+ self.close_socket()
+ except Exception as e:
+ log.error("Unexpected error: %s", str(e))
+ return False
+
+
+statsd = DogStatsd()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/extension.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/extension.py
new file mode 100644
index 0000000..d66848f
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/extension.py
@@ -0,0 +1,42 @@
+import logging
+from os import path
+
+try:
+ # only available in python 3
+ # not an issue since the extension is not compatible with python 2.x runtime
+ # https://docs.aws.amazon.com/lambda/latest/dg/using-extensions.html
+ import urllib.request
+except ImportError:
+    # safe since both calls to urllib are protected with try/except and will return False
+ urllib = None
+
+AGENT_URL = "http://127.0.0.1:8124"
+HELLO_PATH = "/lambda/hello"
+FLUSH_PATH = "/lambda/flush"
+EXTENSION_PATH = "/opt/extensions/datadog-agent"
+
+logger = logging.getLogger(__name__)
+
+
+def is_extension_running():
+ if not path.exists(EXTENSION_PATH):
+ return False
+ try:
+ urllib.request.urlopen(AGENT_URL + HELLO_PATH)
+ except Exception as e:
+ logger.debug("Extension is not running, returned with error %s", e)
+ return False
+ return True
+
+
+def flush_extension():
+ try:
+ req = urllib.request.Request(AGENT_URL + FLUSH_PATH, "".encode("ascii"))
+ urllib.request.urlopen(req)
+ except Exception as e:
+ logger.debug("Failed to flush extension, returned with error %s", e)
+ return False
+ return True
+
+
+should_use_extension = is_extension_running()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/handler.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/handler.py
new file mode 100644
index 0000000..09cc5e7
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/handler.py
@@ -0,0 +1,31 @@
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License Version 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2020 Datadog, Inc.
+
+from __future__ import absolute_import
+from importlib import import_module
+
+import os
+from datadog_lambda.wrapper import datadog_lambda_wrapper
+from datadog_lambda.module_name import modify_module_name
+
+
+class HandlerError(Exception):
+ pass
+
+
+path = os.environ.get("DD_LAMBDA_HANDLER", None)
+if path is None:
+ raise HandlerError(
+ "DD_LAMBDA_HANDLER is not defined. Can't use prebuilt datadog handler"
+ )
+parts = path.rsplit(".", 1)
+if len(parts) != 2:
+ raise HandlerError("Value %s for DD_LAMBDA_HANDLER has invalid format." % path)
+
+
+(mod_name, handler_name) = parts
+modified_mod_name = modify_module_name(mod_name)
+handler_module = import_module(modified_mod_name)
+handler = datadog_lambda_wrapper(getattr(handler_module, handler_name))
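+
+# Illustrative configuration ("myapp.handler" is a hypothetical value):
+#   DD_LAMBDA_HANDLER=myapp.handler
+# imports module "myapp" and wraps its "handler" attribute, so the Lambda
+# runtime handler can point at this module's `handler` instead.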
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/metric.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/metric.py
new file mode 100644
index 0000000..ca23ed9
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/metric.py
@@ -0,0 +1,136 @@
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License Version 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2019 Datadog, Inc.
+
+import os
+import json
+import time
+import logging
+
+from datadog_lambda.extension import should_use_extension
+from datadog_lambda.tags import get_enhanced_metrics_tags, tag_dd_lambda_layer
+from datadog_lambda.api import init_api
+
+logger = logging.getLogger(__name__)
+
+lambda_stats = None
+
+init_api()
+
+if should_use_extension:
+ from datadog_lambda.statsd_writer import StatsDWriter
+
+ lambda_stats = StatsDWriter()
+else:
+ # Periodic flushing in a background thread is NOT guaranteed to succeed
+ # and leads to data loss. When disabled, metrics are only flushed at the
+ # end of invocation. To make metrics submitted from a long-running Lambda
+ # function available sooner, consider using the Datadog Lambda extension.
+ from datadog_lambda.thread_stats_writer import ThreadStatsWriter
+
+ flush_in_thread = os.environ.get("DD_FLUSH_IN_THREAD", "").lower() == "true"
+ lambda_stats = ThreadStatsWriter(flush_in_thread)
+
+
+def lambda_metric(metric_name, value, timestamp=None, tags=None, force_async=False):
+ """
+ Submit a data point to Datadog distribution metrics.
+ https://docs.datadoghq.com/graphing/metrics/distributions/
+
+ When DD_FLUSH_TO_LOG is True, write metric to log, and
+ wait for the Datadog Log Forwarder Lambda function to submit
+ the metrics asynchronously.
+
+ Otherwise, the metrics will be submitted to the Datadog API
+ periodically and at the end of the function execution in a
+ background thread.
+
+ Note that if the extension is present, it will override the DD_FLUSH_TO_LOG value
+ and always use the layer to send metrics to the extension.
+ """
+ flush_to_logs = os.environ.get("DD_FLUSH_TO_LOG", "").lower() == "true"
+ tags = tag_dd_lambda_layer(tags)
+
+ if should_use_extension:
+ logger.debug(
+ "Sending metric %s value %s to Datadog via extension", metric_name, value
+ )
+ lambda_stats.distribution(metric_name, value, tags=tags, timestamp=timestamp)
+ else:
+ if flush_to_logs or force_async:
+ write_metric_point_to_stdout(
+ metric_name, value, timestamp=timestamp, tags=tags
+ )
+ else:
+ lambda_stats.distribution(
+ metric_name, value, tags=tags, timestamp=timestamp
+ )
+
+
+def write_metric_point_to_stdout(metric_name, value, timestamp=None, tags=[]):
+ """Writes the specified metric point to standard output"""
+ logger.debug(
+ "Sending metric %s value %s to Datadog via log forwarder", metric_name, value
+ )
+ print(
+ json.dumps(
+ {
+ "m": metric_name,
+ "v": value,
+ "e": timestamp or int(time.time()),
+ "t": tags,
+ }
+ )
+ )
+
+
+def flush_stats():
+ lambda_stats.flush()
+
+
+def are_enhanced_metrics_enabled():
+ """Check env var to find if enhanced metrics should be submitted
+
+ Returns:
+ boolean for whether enhanced metrics are enabled
+ """
+ # DD_ENHANCED_METRICS defaults to true
+ return os.environ.get("DD_ENHANCED_METRICS", "true").lower() == "true"
+
+
+def submit_enhanced_metric(metric_name, lambda_context):
+ """Submits the enhanced metric with the given name
+
+ Args:
+ metric_name (str): metric name w/o enhanced prefix i.e. "invocations" or "errors"
+ lambda_context (dict): Lambda context dict passed to the function by AWS
+ """
+ if not are_enhanced_metrics_enabled():
+ logger.debug(
+ "Not submitting enhanced metric %s because enhanced metrics are disabled",
+ metric_name,
+ )
+ return
+ tags = get_enhanced_metrics_tags(lambda_context)
+ metric_name = "aws.lambda.enhanced." + metric_name
+ # Enhanced metrics always use an async submission method (e.g., logs or the extension).
+ lambda_metric(metric_name, 1, timestamp=None, tags=tags, force_async=True)
+
+
+def submit_invocations_metric(lambda_context):
+ """Increment aws.lambda.enhanced.invocations by 1, applying runtime, layer, and cold_start tags
+
+ Args:
+ lambda_context (dict): Lambda context dict passed to the function by AWS
+ """
+ submit_enhanced_metric("invocations", lambda_context)
+
+
+def submit_errors_metric(lambda_context):
+ """Increment aws.lambda.enhanced.errors by 1, applying runtime, layer, and cold_start tags
+
+ Args:
+ lambda_context (dict): Lambda context dict passed to the function by AWS
+ """
+ submit_enhanced_metric("errors", lambda_context)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/module_name.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/module_name.py
new file mode 100644
index 0000000..9e4a93e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/module_name.py
@@ -0,0 +1,3 @@
+def modify_module_name(module_name):
+ """Returns a valid modified module to get imported"""
+ return ".".join(module_name.split("/"))
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/patch.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/patch.py
new file mode 100644
index 0000000..0f6d28e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/patch.py
@@ -0,0 +1,159 @@
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License Version 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2019 Datadog, Inc.
+
+import json
+import os
+import sys
+import logging
+import zlib
+
+from wrapt import wrap_function_wrapper as wrap
+from wrapt.importer import when_imported
+from ddtrace import patch_all as patch_all_dd
+
+from datadog_lambda.tracing import (
+ get_dd_trace_context,
+ dd_tracing_enabled,
+)
+from collections.abc import MutableMapping
+
+logger = logging.getLogger(__name__)
+
+_http_patched = False
+_requests_patched = False
+_integration_tests_patched = False
+
+
+def patch_all():
+ """
+ Patch third-party libraries for tracing.
+ """
+ _patch_for_integration_tests()
+
+ if dd_tracing_enabled:
+ patch_all_dd()
+ else:
+ _patch_http()
+ _ensure_patch_requests()
+
+
+def _patch_for_integration_tests():
+ """
+ Patch `requests` to log the outgoing requests for integration tests.
+ """
+ global _integration_tests_patched
+ is_in_tests = os.environ.get("DD_INTEGRATION_TEST", "false").lower() == "true"
+ if not _integration_tests_patched and is_in_tests:
+ wrap("requests", "Session.send", _log_request)
+ _integration_tests_patched = True
+
+
+def _patch_http():
+ """
+ Patch `http.client` (Python 3) module.
+ """
+ global _http_patched
+ http_module = "http.client"
+ if not _http_patched:
+ _http_patched = True
+ wrap(http_module, "HTTPConnection.request", _wrap_http_request)
+
+ logger.debug("Patched %s", http_module)
+
+
+def _ensure_patch_requests():
+ """
+ `requests` is third-party, may not be installed or used,
+ but ensure it gets patched if installed and used.
+ """
+ if "requests" in sys.modules:
+ # already imported, patch now
+ _patch_requests(sys.modules["requests"])
+ else:
+ # patch when imported
+ when_imported("requests")(_patch_requests)
+
+
+def _patch_requests(module):
+ """
+ Patch the high-level HTTP client module `requests`
+ if it's installed.
+ """
+ global _requests_patched
+ if not _requests_patched:
+ _requests_patched = True
+ try:
+ wrap("requests", "Session.request", _wrap_requests_request)
+ logger.debug("Patched requests")
+ except Exception:
+ logger.debug("Failed to patch requests", exc_info=True)
+
+
+def _wrap_requests_request(func, instance, args, kwargs):
+ """
+ Wrap `requests.Session.request` to inject the Datadog trace headers
+ into the outgoing requests.
+ """
+ context = get_dd_trace_context()
+ if "headers" in kwargs and isinstance(kwargs["headers"], MutableMapping):
+ kwargs["headers"].update(context)
+ elif len(args) >= 5 and isinstance(args[4], MutableMapping):
+ args[4].update(context)
+ else:
+ kwargs["headers"] = context
+
+ return func(*args, **kwargs)
+
+
+def _wrap_http_request(func, instance, args, kwargs):
+ """
+ Wrap `http.client` (python3) to inject
+ the Datadog trace headers into the outgoing requests.
+ """
+ context = get_dd_trace_context()
+ if "headers" in kwargs and isinstance(kwargs["headers"], MutableMapping):
+ kwargs["headers"].update(context)
+ elif len(args) >= 4 and isinstance(args[3], MutableMapping):
+ args[3].update(context)
+ else:
+ kwargs["headers"] = context
+
+ return func(*args, **kwargs)
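+
+
+# Illustrative effect of the two wrappers above: an outgoing call such as
+#   requests.get("https://example.com")
+# leaves with the Datadog propagation headers merged into its `headers`, e.g.
+# x-datadog-trace-id, x-datadog-parent-id and x-datadog-sampling-priority
+# (whichever headers ddtrace's HTTPPropagator injects for the active context).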
+
+
+def _log_request(func, instance, args, kwargs):
+ request = kwargs.get("request") or args[0]
+ _print_request_string(request)
+ return func(*args, **kwargs)
+
+
+def _print_request_string(request):
+ """Print the request so that it can be checked in integration tests
+
+ Only used by integration tests.
+ """
+ method = request.method
+ url = request.url
+
+ # Sort the datapoints POSTed by their name so that snapshots always align
+ data = request.body or "{}"
+ # If payload is compressed, decompress it so we can parse it
+ if request.headers.get("Content-Encoding") == "deflate":
+ data = zlib.decompress(data)
+ data_dict = json.loads(data)
+ data_dict.get("series", []).sort(key=lambda series: series.get("metric"))
+ sorted_data = json.dumps(data_dict)
+
+ # Sort headers to prevent any differences in ordering
+ headers = request.headers or {}
+ sorted_headers = sorted(
+ "{}:{}".format(key, value) for key, value in headers.items()
+ )
+ sorted_header_str = json.dumps(sorted_headers)
+ print(
+ "HTTP {} {} Headers: {} Data: {}".format(
+ method, url, sorted_header_str, sorted_data
+ )
+ )
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/stats_writer.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/stats_writer.py
new file mode 100644
index 0000000..d3919c3
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/stats_writer.py
@@ -0,0 +1,9 @@
+class StatsWriter:
+ def distribution(self, metric_name, value, tags=[], timestamp=None):
+ raise NotImplementedError()
+
+ def flush(self):
+ raise NotImplementedError()
+
+ def stop(self):
+ raise NotImplementedError()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/statsd_writer.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/statsd_writer.py
new file mode 100644
index 0000000..33843dc
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/statsd_writer.py
@@ -0,0 +1,17 @@
+from datadog_lambda.stats_writer import StatsWriter
+from datadog_lambda.dogstatsd import statsd
+
+
+class StatsDWriter(StatsWriter):
+ """
+ Writes distribution metrics using StatsD protocol
+ """
+
+ def distribution(self, metric_name, value, tags=[], timestamp=None):
+ statsd.distribution(metric_name, value, tags=tags)
+
+ def flush(self):
+ pass
+
+ def stop(self):
+ pass
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tag_object.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tag_object.py
new file mode 100644
index 0000000..ec1c5a6
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tag_object.py
@@ -0,0 +1,68 @@
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License Version 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2021 Datadog, Inc.
+
+from decimal import Decimal
+import json
+import logging
+
+redactable_keys = ["authorization", "x-authorization", "password", "token"]
+max_depth = 10
+logger = logging.getLogger(__name__)
+
+
+def tag_object(span, key, obj, depth=0):
+ if obj is None:
+ return span.set_tag(key, obj)
+ if depth >= max_depth:
+ return span.set_tag(key, _redact_val(key, str(obj)[0:5000]))
+ depth += 1
+ if _should_try_string(obj):
+ parsed = None
+ try:
+ parsed = json.loads(obj)
+ return tag_object(span, key, parsed, depth)
+ except ValueError:
+ redacted = _redact_val(key, obj[0:5000])
+ return span.set_tag(key, redacted)
+ if isinstance(obj, int) or isinstance(obj, float) or isinstance(obj, Decimal):
+ return span.set_tag(key, str(obj))
+ if isinstance(obj, list):
+ for k, v in enumerate(obj):
+ formatted_key = "{}.{}".format(key, k)
+ tag_object(span, formatted_key, v, depth)
+ return
+ if hasattr(obj, "items"):
+ for k, v in obj.items():
+ formatted_key = "{}.{}".format(key, k)
+ tag_object(span, formatted_key, v, depth)
+ return
+ if hasattr(obj, "to_dict"):
+ for k, v in obj.to_dict().items():
+ formatted_key = "{}.{}".format(key, k)
+ tag_object(span, formatted_key, v, depth)
+ return
+ try:
+ value_as_str = str(obj)
+ except Exception:
+ value_as_str = "UNKNOWN"
+ return span.set_tag(key, value_as_str)
+
+
+def _should_try_string(obj):
+ try:
+ if isinstance(obj, str) or isinstance(obj, unicode):
+ return True
+ except NameError:
+ if isinstance(obj, bytes):
+ return True
+
+ return False
+
+
+def _redact_val(k, v):
+ split_key = k.split(".").pop() or k
+ if split_key in redactable_keys:
+ return "redacted"
+ return v
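+
+
+# Example (illustrative): tag_object(span, "payload", '{"password": "x", "a": [1, 2]}')
+# parses the JSON string and sets tags roughly like
+#   payload.password = "redacted", payload.a.0 = "1", payload.a.1 = "2",
+# recursing at most max_depth levels and truncating leaf strings to 5000 chars.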
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tags.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tags.py
new file mode 100644
index 0000000..cdaeb4e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tags.py
@@ -0,0 +1,104 @@
+import sys
+
+from platform import python_version_tuple
+
+from datadog_lambda import __version__
+from datadog_lambda.cold_start import get_cold_start_tag
+
+
+def _format_dd_lambda_layer_tag():
+ """
+ Formats the dd_lambda_layer tag, e.g., 'dd_lambda_layer:datadog-python39_1'
+ """
+ runtime = "python{}{}".format(sys.version_info[0], sys.version_info[1])
+ return "dd_lambda_layer:datadog-{}_{}".format(runtime, __version__)
+
+
+def tag_dd_lambda_layer(tags):
+ """
+ Used by lambda_metric to insert the dd_lambda_layer tag
+ """
+ dd_lambda_layer_tag = _format_dd_lambda_layer_tag()
+ if tags:
+ return tags + [dd_lambda_layer_tag]
+ else:
+ return [dd_lambda_layer_tag]
+
+
+def parse_lambda_tags_from_arn(lambda_context):
+ """Generate the list of lambda tags based on the data in the arn
+ Args:
+ lambda_context: Aws lambda context object
+ ex: lambda_context.arn = arn:aws:lambda:us-east-1:123597598159:function:my-lambda:1
+ """
+ # Set up a flag to distinguish between a version and an alias
+ hasAlias = False
+ # Split the ARN into its colon-separated parts
+ split_arn = lambda_context.invoked_function_arn.split(":")
+
+ if len(split_arn) > 7:
+ hasAlias = True
+ _, _, _, region, account_id, _, function_name, alias = split_arn
+ else:
+ _, _, _, region, account_id, _, function_name = split_arn
+
+ # Add the standard tags to a list
+ tags = [
+ "region:{}".format(region),
+ "account_id:{}".format(account_id),
+ "functionname:{}".format(function_name),
+ ]
+
+ # Check if we have a version or alias
+ if hasAlias:
+ # If $Latest, drop the $ for datadog tag convention. A lambda alias can't start with $
+ if alias.startswith("$"):
+ alias = alias[1:]
+ # Versions are numeric. Aliases need the executed version tag
+ elif not check_if_number(alias):
+ tags.append("executedversion:{}".format(lambda_context.function_version))
+ # create resource tag with function name and alias/version
+ resource = "resource:{}:{}".format(function_name, alias)
+ else:
+ # Resource is only the function name otherwise
+ resource = "resource:{}".format(function_name)
+
+ tags.append(resource)
+
+ return tags
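+
+
+# Example (illustrative): for the docstring's ARN ending in ":my-lambda:1",
+# this returns
+#   ["region:us-east-1", "account_id:123597598159",
+#    "functionname:my-lambda", "resource:my-lambda:1"]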
+
+
+def get_runtime_tag():
+ """Get the runtime tag from the current Python version"""
+ major_version, minor_version, _ = python_version_tuple()
+
+ return "runtime:python{major}.{minor}".format(
+ major=major_version, minor=minor_version
+ )
+
+
+def get_library_version_tag():
+ """Get datadog lambda library tag"""
+ return "datadog_lambda:v{}".format(__version__)
+
+
+def get_enhanced_metrics_tags(lambda_context):
+ """Get the list of tags to apply to enhanced metrics"""
+ return parse_lambda_tags_from_arn(lambda_context) + [
+ get_cold_start_tag(),
+ "memorysize:{}".format(lambda_context.memory_limit_in_mb),
+ get_runtime_tag(),
+ get_library_version_tag(),
+ ]
+
+
+def check_if_number(alias):
+ """
+ Check if the alias is a version number.
+ Python 2 has no easy way to test this, unlike Python 3.
+ """
+ try:
+ float(alias)
+ return True
+ except ValueError:
+ return False
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/thread_stats_writer.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/thread_stats_writer.py
new file mode 100644
index 0000000..bfcf3c9
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/thread_stats_writer.py
@@ -0,0 +1,65 @@
+import logging
+
+# Make sure this package is always lazy-loaded, off the critical path, since the
+# underlying packages are heavy to load and unnecessary when the extension is present
+from datadog.threadstats import ThreadStats
+from datadog_lambda.stats_writer import StatsWriter
+
+logger = logging.getLogger(__name__)
+
+
+class ThreadStatsWriter(StatsWriter):
+ """
+ Writes distribution metrics using the ThreadStats class
+ """
+
+ def __init__(self, flush_in_thread):
+ self.thread_stats = ThreadStats(compress_payload=True)
+ self.thread_stats.start(flush_in_thread=flush_in_thread)
+
+ def distribution(self, metric_name, value, tags=[], timestamp=None):
+ self.thread_stats.distribution(
+ metric_name, value, tags=tags, timestamp=timestamp
+ )
+
+ def flush(self):
+ """ "Flush distributions from ThreadStats to Datadog.
+ Modified based on `datadog.threadstats.base.ThreadStats.flush()`,
+ to gain better control over exception handling.
+ """
+ _, dists = self.thread_stats._get_aggregate_metrics_and_dists(float("inf"))
+ count_dists = len(dists)
+ if not count_dists:
+ logger.debug("No distributions to flush. Continuing.")
+
+ self.thread_stats.flush_count += 1
+ logger.debug(
+ "Flush #%s sending %s distributions",
+ self.thread_stats.flush_count,
+ count_dists,
+ )
+ try:
+ self.thread_stats.reporter.flush_distributions(dists)
+ except Exception as e:
+ # The nature of the root issue https://bugs.python.org/issue41345 is complex,
+ # but comprehensive tests suggest that it is safe to retry on this specific error.
+ if type(e).__name__ == "ClientError" and "RemoteDisconnected" in str(e):
+ logger.debug(
+ "Retry flush #%s due to RemoteDisconnected",
+ self.thread_stats.flush_count,
+ )
+ try:
+ self.thread_stats.reporter.flush_distributions(dists)
+ except Exception:
+ logger.debug(
+ "Flush #%s failed after retry",
+ self.thread_stats.flush_count,
+ exc_info=True,
+ )
+ else:
+ logger.debug(
+ "Flush #%s failed", self.thread_stats.flush_count, exc_info=True
+ )
+
+ def stop(self):
+ self.thread_stats.stop()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tracing.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tracing.py
new file mode 100644
index 0000000..dc7e32b
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/tracing.py
@@ -0,0 +1,1308 @@
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License Version 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2019 Datadog, Inc.
+import hashlib
+import logging
+import os
+import json
+import base64
+from datetime import datetime, timezone
+from typing import Optional, Dict
+
+from datadog_lambda.metric import submit_errors_metric
+
+try:
+ from typing import Literal
+except ImportError:
+ # Literal was added to typing in python 3.8
+ from typing_extensions import Literal
+
+from datadog_lambda.constants import (
+ SamplingPriority,
+ TraceContextSource,
+ XrayDaemon,
+ Headers,
+)
+from datadog_lambda.xray import (
+ send_segment,
+ parse_xray_header,
+)
+from ddtrace import tracer, patch, Span
+from ddtrace import __version__ as ddtrace_version
+from ddtrace.propagation.http import HTTPPropagator
+from ddtrace.context import Context
+from datadog_lambda import __version__ as datadog_lambda_version
+from datadog_lambda.trigger import (
+ _EventSource,
+ parse_event_source,
+ get_first_record,
+ EventTypes,
+ EventSubtypes,
+)
+
+dd_trace_otel_enabled = (
+ os.environ.get("DD_TRACE_OTEL_ENABLED", "false").lower() == "true"
+)
+if dd_trace_otel_enabled:
+ from opentelemetry.trace import set_tracer_provider
+ from ddtrace.opentelemetry import TracerProvider
+
+ set_tracer_provider(TracerProvider())
+
+
+logger = logging.getLogger(__name__)
+
+dd_trace_context = None
+dd_tracing_enabled = os.environ.get("DD_TRACE_ENABLED", "false").lower() == "true"
+if dd_tracing_enabled:
+ # Enable the telemetry client if the user has opted in
+ if (
+ os.environ.get("DD_INSTRUMENTATION_TELEMETRY_ENABLED", "false").lower()
+ == "true"
+ ):
+ from ddtrace.internal.telemetry import telemetry_writer
+
+ telemetry_writer.enable()
+
+propagator = HTTPPropagator()
+
+
+def _convert_xray_trace_id(xray_trace_id):
+ """
+ Convert X-Ray trace id (hex)'s last 63 bits to a Datadog trace id (int).
+ """
+ return 0x7FFFFFFFFFFFFFFF & int(xray_trace_id[-16:], 16)
+
+
+def _convert_xray_entity_id(xray_entity_id):
+ """
+ Convert X-Ray (sub)segment id (hex) to a Datadog span id (int).
+ """
+ return int(xray_entity_id, 16)
+
+
+def _convert_xray_sampling(xray_sampled):
+ """
+ Convert X-Ray sampled (True/False) to its Datadog counterpart.
+ """
+ return SamplingPriority.USER_KEEP if xray_sampled else SamplingPriority.USER_REJECT
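+
+
+# Taken together (descriptive note): the three converters above map an X-Ray
+# entity to Datadog ids: the last 16 hex chars of the trace id, masked to
+# 63 bits, become the trace id; the hex (sub)segment id becomes the span id;
+# and Sampled=True becomes USER_KEEP.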
+
+
+def _get_xray_trace_context():
+ if not is_lambda_context():
+ return None
+
+ xray_trace_entity = parse_xray_header(
+ os.environ.get(XrayDaemon.XRAY_TRACE_ID_HEADER_NAME, "")
+ )
+ if xray_trace_entity is None:
+ return None
+ trace_context = Context(
+ trace_id=_convert_xray_trace_id(xray_trace_entity.get("trace_id")),
+ span_id=_convert_xray_entity_id(xray_trace_entity.get("parent_id")),
+ sampling_priority=_convert_xray_sampling(xray_trace_entity.get("sampled")),
+ )
+ logger.debug(
+ "Converted trace context %s from X-Ray segment %s",
+ trace_context,
+ (
+ xray_trace_entity["trace_id"],
+ xray_trace_entity["parent_id"],
+ xray_trace_entity["sampled"],
+ ),
+ )
+ return trace_context
+
+
+def _get_dd_trace_py_context():
+ span = tracer.current_span()
+ if not span:
+ return None
+
+ logger.debug(
+ "found dd trace context: %s", (span.context.trace_id, span.context.span_id)
+ )
+ return span.context
+
+
+def _is_context_complete(context):
+ return (
+ context
+ and context.trace_id
+ and context.span_id
+ and context.sampling_priority is not None
+ )
+
+
+def create_dd_dummy_metadata_subsegment(
+ subsegment_metadata_value, subsegment_metadata_key
+):
+ """
+ Create a Datadog subsegment to pass the Datadog trace context or Lambda function
+ tags into its metadata field, so the X-Ray trace can be converted to a Datadog
+ trace in the Datadog backend with the correct context.
+ """
+ send_segment(subsegment_metadata_key, subsegment_metadata_value)
+
+
+def extract_context_from_lambda_context(lambda_context):
+ """
+ Extract Datadog trace context from the `client_context` attr
+ from the Lambda `context` object.
+
+ dd_trace libraries inject this trace context on synchronous invocations
+ """
+ dd_data = None
+ client_context = lambda_context.client_context
+ if client_context and client_context.custom:
+ dd_data = client_context.custom
+ if "_datadog" in client_context.custom:
+ # Legacy trace propagation dict
+ dd_data = client_context.custom.get("_datadog")
+ return propagator.extract(dd_data)
+
+
+def extract_context_from_http_event_or_context(
+ event,
+ lambda_context,
+ event_source: _EventSource,
+ decode_authorizer_context: bool = True,
+):
+ """
+ Extract Datadog trace context from the `headers` key of the Lambda
+ `event` object.
+
+ Falls back to lambda context if no trace data is found in the `headers`
+ """
+ if decode_authorizer_context:
+ is_http_api = event_source.equals(
+ EventTypes.API_GATEWAY, subtype=EventSubtypes.HTTP_API
+ )
+ injected_authorizer_data = get_injected_authorizer_data(event, is_http_api)
+ context = propagator.extract(injected_authorizer_data)
+ if _is_context_complete(context):
+ return context
+
+ headers = event.get("headers")
+ context = propagator.extract(headers)
+
+ if not _is_context_complete(context):
+ return extract_context_from_lambda_context(lambda_context)
+
+ return context
+
+
+def create_sns_event(message):
+ return {
+ "Records": [
+ {
+ "EventSource": "aws:sns",
+ "EventVersion": "1.0",
+ "Sns": message,
+ }
+ ]
+ }
+
+
+def extract_context_from_sqs_or_sns_event_or_context(event, lambda_context):
+ """
+ Extract Datadog trace context from an SQS event.
+
+ The extraction chain goes as follows:
+ EB => SQS (the first record's body contains the EB context), or
+ SNS => SQS (the first record's body contains the SNS context), or
+ SQS or SNS (`messageAttributes` for SQS context,
+ `MessageAttributes` for SNS context), else
+ Lambda Context.
+
+ Falls back to lambda context if no trace data is found in the SQS message attributes.
+ """
+
+ # EventBridge => SQS
+ try:
+ context = _extract_context_from_eventbridge_sqs_event(event)
+ if _is_context_complete(context):
+ return context
+ except Exception:
+ logger.debug("Failed extracting context as EventBridge to SQS.")
+
+ try:
+ first_record = event.get("Records")[0]
+
+ # logic to deal with SNS => SQS event
+ if "body" in first_record:
+ body_str = first_record.get("body", {})
+ try:
+ body = json.loads(body_str)
+ if body.get("Type", "") == "Notification" and "TopicArn" in body:
+ logger.debug("Found SNS message inside SQS event")
+ first_record = get_first_record(create_sns_event(body))
+ except Exception:
+ first_record = event.get("Records")[0]
+
+ msg_attributes = first_record.get(
+ "messageAttributes",
+ first_record.get("Sns", {}).get("MessageAttributes", {}),
+ )
+ dd_payload = msg_attributes.get("_datadog", {})
+ # SQS uses dataType and binaryValue/stringValue
+ # SNS uses Type and Value
+ dd_json_data_type = dd_payload.get("Type", dd_payload.get("dataType", ""))
+ if dd_json_data_type == "Binary":
+ dd_json_data = dd_payload.get(
+ "binaryValue",
+ dd_payload.get("Value", r"{}"),
+ )
+ dd_json_data = base64.b64decode(dd_json_data)
+ elif dd_json_data_type == "String":
+ dd_json_data = dd_payload.get(
+ "stringValue",
+ dd_payload.get("Value", r"{}"),
+ )
+ else:
+ logger.debug(
+ "Datadog Lambda Python only supports extracting trace"
+ "context from String or Binary SQS/SNS message attributes"
+ )
+ return extract_context_from_lambda_context(lambda_context)
+ dd_data = json.loads(dd_json_data)
+ return propagator.extract(dd_data)
+ except Exception as e:
+ logger.debug("The trace extractor returned with error %s", e)
+ return extract_context_from_lambda_context(lambda_context)
+
+
+def _extract_context_from_eventbridge_sqs_event(event):
+ """
+ Extracts Datadog trace context from an SQS event triggered by
+ EventBridge.
+
+ This is only possible if first record in `Records` contains a
+ `body` field which contains the EventBridge `detail` as a JSON string.
+ """
+ first_record = event.get("Records")[0]
+ body_str = first_record.get("body")
+ body = json.loads(body_str)
+ detail = body.get("detail")
+ dd_context = detail.get("_datadog")
+ return propagator.extract(dd_context)
+
+
+def extract_context_from_eventbridge_event(event, lambda_context):
+ """
+ Extract Datadog trace context from an EventBridge event's `detail` field.
+ This is only possible if `detail` carries an injected `_datadog` payload.
+ """
+ try:
+ detail = event.get("detail")
+ dd_context = detail.get("_datadog")
+ if not dd_context:
+ return extract_context_from_lambda_context(lambda_context)
+ return propagator.extract(dd_context)
+ except Exception as e:
+ logger.debug("The trace extractor returned with error %s", e)
+ return extract_context_from_lambda_context(lambda_context)
+
+
+def extract_context_from_kinesis_event(event, lambda_context):
+ """
+ Extract datadog trace context from a Kinesis Stream's base64 encoded data string
+ """
+ try:
+ record = get_first_record(event)
+ data = record.get("kinesis", {}).get("data", None)
+ if data:
+ b64_bytes = data.encode("ascii")
+ str_bytes = base64.b64decode(b64_bytes)
+ data_str = str_bytes.decode("ascii")
+ data_obj = json.loads(data_str)
+ dd_ctx = data_obj.get("_datadog")
+
+ if not dd_ctx:
+ return extract_context_from_lambda_context(lambda_context)
+
+ return propagator.extract(dd_ctx)
+ except Exception as e:
+ logger.debug("The trace extractor returned with error %s", e)
+ return extract_context_from_lambda_context(lambda_context)
+
+
+def _deterministic_md5_hash(s: str) -> int:
+ """MD5 here is to generate trace_id, not for any encryption."""
+ hex_number = hashlib.md5(s.encode("ascii")).hexdigest()
+ binary = bin(int(hex_number, 16))
+ binary_str = str(binary)
+ binary_str_remove_0b = binary_str[2:].rjust(128, "0")
+ most_significant_64_bits_without_leading_1 = "0" + binary_str_remove_0b[1:-64]
+ result = int(most_significant_64_bits_without_leading_1, 2)
+ if result == 0:
+ return 1
+ return result
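+
+
+# Sketch of the derivation above (descriptive note): md5 yields 128 bits; the
+# code keeps bits 1..63 of the most significant half, prepends a 0 bit so the
+# result fits in a positive signed 64-bit id, and maps an all-zero result to 1.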
+
+
+def extract_context_from_step_functions(event, lambda_context):
+ """
+ Only extract datadog trace context when Step Functions Context Object is injected
+ into lambda's event dict.
+ """
+ try:
+ execution_id = event.get("Execution").get("Id")
+ state_name = event.get("State").get("Name")
+ state_entered_time = event.get("State").get("EnteredTime")
+ trace_id = _deterministic_md5_hash(execution_id)
+ parent_id = _deterministic_md5_hash(
+ execution_id + "#" + state_name + "#" + state_entered_time
+ )
+ sampling_priority = SamplingPriority.AUTO_KEEP
+ return Context(
+ trace_id=trace_id, span_id=parent_id, sampling_priority=sampling_priority
+ )
+ except Exception as e:
+ logger.debug("The Step Functions trace extractor returned with error %s", e)
+ return extract_context_from_lambda_context(lambda_context)
+
+
+def extract_context_custom_extractor(extractor, event, lambda_context):
+ """
+ Extract Datadog trace context using a custom trace extractor function
+ """
+ try:
+ (
+ trace_id,
+ parent_id,
+ sampling_priority,
+ ) = extractor(event, lambda_context)
+ return Context(
+ trace_id=int(trace_id),
+ span_id=int(parent_id),
+ sampling_priority=int(sampling_priority),
+ )
+ except Exception as e:
+ logger.debug("The trace extractor returned with error %s", e)
+
+
+def is_authorizer_response(response) -> bool:
+ try:
+ return (
+ response is not None
+ and response["principalId"]
+ and response["policyDocument"]
+ )
+ except (KeyError, AttributeError):
+ pass
+ except Exception as e:
+ logger.debug("unknown error while checking is_authorizer_response %s", e)
+ return False
+
+
+def get_injected_authorizer_data(event, is_http_api) -> dict:
+ try:
+ authorizer_headers = event.get("requestContext", {}).get("authorizer")
+ if not authorizer_headers:
+ return None
+
+ dd_data_raw = (
+ authorizer_headers.get("lambda", {}).get("_datadog")
+ if is_http_api
+ else authorizer_headers.get("_datadog")
+ )
+
+ if not dd_data_raw:
+ return None
+
+ injected_data = json.loads(base64.b64decode(dd_data_raw))
+
+ # A Lambda authorizer's result can be cached, and cached requests still carry the
+ # injected data, so we need a way to detect the cached case and ignore that data.
+ # API Gateway automatically injects an integrationLatency value in some cases; if
+ # it's > 0 we know the result is not cached. But integrationLatency is not available
+ # for the HTTP API case, where we instead use the injected Authorizing_Request_Id to
+ # tell whether the result is cached. Token authorizers, however, don't pass on the
+ # requestId, so neither method works for all cases; we therefore combine both below.
+ if authorizer_headers.get("integrationLatency", 0) > 0 or event.get(
+ "requestContext", {}
+ ).get("requestId") == injected_data.get(Headers.Authorizing_Request_Id):
+ return injected_data
+ else:
+ return None
+
+ except Exception as e:
+ logger.debug("Failed to check if invocated by an authorizer. error %s", e)
+ return None
+
+
+def extract_dd_trace_context(
+ event, lambda_context, extractor=None, decode_authorizer_context: bool = True
+):
+ """
+ Extract Datadog trace context from the Lambda `event` object.
+
+ Write the context to a global `dd_trace_context`, so the trace
+ can be continued on the outgoing requests with the context injected.
+ """
+ global dd_trace_context
+ trace_context_source = None
+ event_source = parse_event_source(event)
+
+ if extractor is not None:
+ context = extract_context_custom_extractor(extractor, event, lambda_context)
+ elif isinstance(event, (set, dict)) and "headers" in event:
+ context = extract_context_from_http_event_or_context(
+ event, lambda_context, event_source, decode_authorizer_context
+ )
+ elif event_source.equals(EventTypes.SNS) or event_source.equals(EventTypes.SQS):
+ context = extract_context_from_sqs_or_sns_event_or_context(
+ event, lambda_context
+ )
+ elif event_source.equals(EventTypes.EVENTBRIDGE):
+ context = extract_context_from_eventbridge_event(event, lambda_context)
+ elif event_source.equals(EventTypes.KINESIS):
+ context = extract_context_from_kinesis_event(event, lambda_context)
+ elif event_source.equals(EventTypes.STEPFUNCTIONS):
+ context = extract_context_from_step_functions(event, lambda_context)
+ else:
+ context = extract_context_from_lambda_context(lambda_context)
+
+ if _is_context_complete(context):
+ logger.debug("Extracted Datadog trace context from event or context")
+ dd_trace_context = context
+ trace_context_source = TraceContextSource.EVENT
+ else:
+ # AWS Lambda runtime caches global variables between invocations,
+ # reset to avoid using the context from the last invocation.
+ dd_trace_context = _get_xray_trace_context()
+ if dd_trace_context:
+ trace_context_source = TraceContextSource.XRAY
+ logger.debug("extracted dd trace context %s", dd_trace_context)
+ return dd_trace_context, trace_context_source, event_source
+
+
+def get_dd_trace_context_obj():
+ """
+ Return the Datadog trace context to be propagated on the outgoing requests.
+
+ If the Lambda function is invoked by a Datadog-traced service, a Datadog
+ trace context may already exist, and it should be used. Otherwise, use the
+ current X-Ray trace entity, or the dd-trace-py context if DD_TRACE_ENABLED is true.
+
+ Most widely used HTTP clients are patched to inject the context
+ automatically, but this function can be used to manually inject the trace
+ context to an outgoing request.
+ """
+ if dd_tracing_enabled:
+ dd_trace_py_context = _get_dd_trace_py_context()
+ if _is_context_complete(dd_trace_py_context):
+ return dd_trace_py_context
+
+ global dd_trace_context
+
+ xray_context = None
+ try:
+ xray_context = _get_xray_trace_context() # xray (sub)segment
+ except Exception as e:
+ logger.debug(
+ "get_dd_trace_context couldn't read the segment from X-Ray, with error %s", e
+ )
+ if not xray_context:
+ return None
+
+ if not _is_context_complete(dd_trace_context):
+ return xray_context
+
+ logger.debug("Set parent id from xray trace context: %s", xray_context.span_id)
+ return Context(
+ trace_id=dd_trace_context.trace_id,
+ span_id=xray_context.span_id,
+ sampling_priority=dd_trace_context.sampling_priority,
+ meta=dd_trace_context._meta.copy(),
+ metrics=dd_trace_context._metrics.copy(),
+ )
+
+
+def get_dd_trace_context():
+ """
+ Return the Datadog trace context to be propagated on the outgoing requests,
+ as a dict of headers.
+ """
+ headers = {}
+ context = get_dd_trace_context_obj()
+ if not _is_context_complete(context):
+ return headers
+ propagator.inject(context, headers)
+ return headers
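+
+
+# Usage sketch (illustrative): for a client that isn't auto-patched, the headers
+# can be attached manually, e.g.
+#   req = urllib.request.Request(url, headers=get_dd_trace_context())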
+
+
+def set_correlation_ids():
+ """
+ Create a dummy span and override its trace_id and span_id so that
+ ddtrace.helpers.get_log_correlation_context() returns a dict containing the
+ correct ids for both auto and manual log correlation.
+
+ TODO: Remove me when Datadog tracer is natively supported in Lambda.
+ """
+ if not is_lambda_context():
+ logger.debug("set_correlation_ids is only supported in LambdaContext")
+ return
+ if dd_tracing_enabled:
+ logger.debug("using ddtrace implementation for spans")
+ return
+
+ context = get_dd_trace_context_obj()
+ if not _is_context_complete(context):
+ return
+
+ tracer.context_provider.activate(context)
+ tracer.trace("dummy.span")
+ logger.debug("correlation ids set")
+
+
+def inject_correlation_ids():
+ """
+ Override the formatter of LambdaLoggerHandler to inject datadog trace and
+ span id for log correlation.
+
+ For manual injections to custom log handlers, use `ddtrace.helpers.get_log_correlation_context`
+ to retrieve a dict containing correlation ids (trace_id, span_id).
+ """
+ # Override the log format of the AWS provided LambdaLoggerHandler
+ root_logger = logging.getLogger()
+ for handler in root_logger.handlers:
+ if handler.__class__.__name__ == "LambdaLoggerHandler" and isinstance(
+ handler.formatter, logging.Formatter
+ ):
+ handler.setFormatter(
+ logging.Formatter(
+ "[%(levelname)s]\t%(asctime)s.%(msecs)dZ\t%(aws_request_id)s\t"
+ "[dd.trace_id=%(dd.trace_id)s dd.span_id=%(dd.span_id)s]\t%(message)s\n",
+ "%Y-%m-%dT%H:%M:%S",
+ )
+ )
+
+ # Patch `logging.Logger.makeRecord` to actually inject correlation ids
+ patch(logging=True)
+
+ logger.debug("logs injection configured")
+
+
+def is_lambda_context():
+ """
+ Return True if the X-Ray context is `LambdaContext`, rather than the
+ regular `Context` (e.g., when testing lambda functions locally).
+ """
+ return os.environ.get(XrayDaemon.FUNCTION_NAME_HEADER_NAME, "") != ""
+
+
+def set_dd_trace_py_root(trace_context_source, merge_xray_traces):
+ if trace_context_source == TraceContextSource.EVENT or merge_xray_traces:
+ context = Context(
+ trace_id=dd_trace_context.trace_id,
+ span_id=dd_trace_context.span_id,
+ sampling_priority=dd_trace_context.sampling_priority,
+ )
+ if merge_xray_traces:
+ xray_context = _get_xray_trace_context()
+ if xray_context and xray_context.span_id:
+ context.span_id = xray_context.span_id
+
+ tracer.context_provider.activate(context)
+ logger.debug(
+ "Set dd trace root context to: %s",
+ (context.trace_id, context.span_id),
+ )
+
+
+def create_inferred_span(
+ event,
+ context,
+ event_source: _EventSource = None,
+ decode_authorizer_context: bool = True,
+):
+ if event_source is None:
+ event_source = parse_event_source(event)
+ try:
+ if event_source.equals(
+ EventTypes.API_GATEWAY, subtype=EventSubtypes.API_GATEWAY
+ ):
+ logger.debug("API Gateway event detected. Inferring a span")
+ return create_inferred_span_from_api_gateway_event(
+ event, context, decode_authorizer_context
+ )
+ elif event_source.equals(EventTypes.LAMBDA_FUNCTION_URL):
+ logger.debug("Function URL event detected. Inferring a span")
+ return create_inferred_span_from_lambda_function_url_event(event, context)
+ elif event_source.equals(
+ EventTypes.API_GATEWAY, subtype=EventSubtypes.HTTP_API
+ ):
+ logger.debug("HTTP API event detected. Inferring a span")
+ return create_inferred_span_from_http_api_event(
+ event, context, decode_authorizer_context
+ )
+ elif event_source.equals(
+ EventTypes.API_GATEWAY, subtype=EventSubtypes.WEBSOCKET
+ ):
+ logger.debug("API Gateway Websocket event detected. Inferring a span")
+ return create_inferred_span_from_api_gateway_websocket_event(
+ event, context, decode_authorizer_context
+ )
+ elif event_source.equals(EventTypes.SQS):
+ logger.debug("SQS event detected. Inferring a span")
+ return create_inferred_span_from_sqs_event(event, context)
+ elif event_source.equals(EventTypes.SNS):
+ logger.debug("SNS event detected. Inferring a span")
+ return create_inferred_span_from_sns_event(event, context)
+ elif event_source.equals(EventTypes.KINESIS):
+ logger.debug("Kinesis event detected. Inferring a span")
+ return create_inferred_span_from_kinesis_event(event, context)
+ elif event_source.equals(EventTypes.DYNAMODB):
+ logger.debug("Dynamodb event detected. Inferring a span")
+ return create_inferred_span_from_dynamodb_event(event, context)
+ elif event_source.equals(EventTypes.S3):
+ logger.debug("S3 event detected. Inferring a span")
+ return create_inferred_span_from_s3_event(event, context)
+ elif event_source.equals(EventTypes.EVENTBRIDGE):
+ logger.debug("Eventbridge event detected. Inferring a span")
+ return create_inferred_span_from_eventbridge_event(event, context)
+ except Exception as e:
+ logger.debug(
+ "Unable to infer span. Detected type: %s. Reason: %s",
+ event_source.to_string(),
+ e,
+ )
+ return None
+ logger.debug("Unable to infer a span: unknown event type")
+ return None
+
+
+def create_service_mapping(val):
+ new_service_mapping = {}
+ for entry in val.split(","):
+ parts = entry.split(":")
+ if len(parts) == 2:
+ key = parts[0].strip()
+ value = parts[1].strip()
+ if key != value and key and value:
+ new_service_mapping[key] = value
+ return new_service_mapping
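+
+
+# Example (illustrative): DD_SERVICE_MAPPING="lambda_sqs:orders,api123:checkout"
+# yields {"lambda_sqs": "orders", "api123": "checkout"}; entries whose key equals
+# the value, or where either side is empty, are dropped.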
+
+
+def determine_service_name(service_mapping, specific_key, generic_key, default_value):
+ service_name = service_mapping.get(specific_key)
+ if service_name is None:
+ service_name = service_mapping.get(generic_key, default_value)
+ return service_name
+
+
+service_mapping = {}
+# Initialization code
+service_mapping_str = os.getenv("DD_SERVICE_MAPPING", "")
+service_mapping = create_service_mapping(service_mapping_str)
+
+
+def create_inferred_span_from_lambda_function_url_event(event, context):
+ request_context = event.get("requestContext")
+ api_id = request_context.get("apiId")
+ domain = request_context.get("domainName")
+ service_name = determine_service_name(service_mapping, api_id, "lambda_url", domain)
+ method = request_context.get("http", {}).get("method")
+ path = request_context.get("http", {}).get("path")
+ resource = "{0} {1}".format(method, path)
+ tags = {
+ "operation_name": "aws.lambda.url",
+ "http.url": domain + path,
+ "endpoint": path,
+ "http.method": method,
+ "resource_names": domain + path,
+ "request_id": context.aws_request_id,
+ }
+ request_time_epoch = request_context.get("timeEpoch")
+ args = {
+ "service": service_name,
+ "resource": resource,
+ "span_type": "http",
+ }
+ tracer.set_tags(
+ {"_dd.origin": "lambda"}
+ ) # function urls don't count as lambda_inferred,
+ # because they're in the same service as the inferring lambda function
+ span = tracer.trace("aws.lambda.url", **args)
+ InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="sync")
+ if span:
+ span.set_tags(tags)
+ span.start = request_time_epoch / 1000
+ return span
+
+
+def is_api_gateway_invocation_async(event):
+ return event.get("headers", {}).get("X-Amz-Invocation-Type") == "Event"
+
+
+def insert_upstream_authorizer_span(
+ kwargs_to_start_span, other_tags_for_span, start_time_ns, finish_time_ns
+):
+ """Insert the authorizer span.
+ Without this: parent span --child-> inferred span
+ With this insertion: parent span --child-> upstreamAuthorizerSpan --child-> inferred span
+
+ Args:
+ kwargs_to_start_span (Dict): the same keyword arguments used for the inferred span
+ other_tags_for_span (Dict): the same tag keyword arguments used for the inferred span
+ start_time_ns (int): the start time of the span in nanoseconds
+ finish_time_ns (int): the finish time of the span in nanoseconds
+ """
+ trace_ctx = tracer.current_trace_context()
+ upstream_authorizer_span = tracer.trace(
+ "aws.apigateway.authorizer", **kwargs_to_start_span
+ )
+ upstream_authorizer_span.set_tags(other_tags_for_span)
+ upstream_authorizer_span.set_tag("operation_name", "aws.apigateway.authorizer")
+ # always sync for the authorizer invocation
+ InferredSpanInfo.set_tags_to_span(upstream_authorizer_span, synchronicity="sync")
+ upstream_authorizer_span.start_ns = int(start_time_ns)
+ upstream_authorizer_span.finish(finish_time_ns / 1e9)
+ # trace context needs to be set again as it is reset by finish()
+ tracer.context_provider.activate(trace_ctx)
+ return upstream_authorizer_span
+
+
+def process_injected_data(event, request_time_epoch_ms, args, tags):
+ """
+ This covers the ApiGateway RestAPI and Websocket cases. It doesn't cover Http API cases.
+ """
+ injected_authorizer_data = get_injected_authorizer_data(event, False)
+ if injected_authorizer_data:
+ try:
+ start_time_ns = int(
+ injected_authorizer_data.get(Headers.Parent_Span_Finish_Time)
+ )
+ finish_time_ns = (
+ request_time_epoch_ms
+ + (
+ int(
+ event["requestContext"]["authorizer"].get(
+ "integrationLatency", 0
+ )
+ )
+ )
+ ) * 1e6
+ upstream_authorizer_span = insert_upstream_authorizer_span(
+ args, tags, start_time_ns, finish_time_ns
+ )
+ return upstream_authorizer_span, finish_time_ns
+ except Exception as e:
+ logger.debug(
+ "Unable to insert authorizer span. Continue to generate the main span.\
+ Reason: %s",
+ e,
+ )
+ return None, None
+ else:
+ return None, None
+
+
+def create_inferred_span_from_api_gateway_websocket_event(
+ event, context, decode_authorizer_context: bool = True
+):
+ request_context = event.get("requestContext")
+ domain = request_context.get("domainName")
+ endpoint = request_context.get("routeKey")
+ api_id = request_context.get("apiId")
+
+ service_name = determine_service_name(
+ service_mapping, api_id, "lambda_api_gateway", domain
+ )
+ tags = {
+ "operation_name": "aws.apigateway.websocket",
+ "http.url": domain + endpoint,
+ "endpoint": endpoint,
+ "resource_names": endpoint,
+ "apiid": api_id,
+ "apiname": api_id,
+ "stage": request_context.get("stage"),
+ "request_id": context.aws_request_id,
+ "connection_id": request_context.get("connectionId"),
+ "event_type": request_context.get("eventType"),
+ "message_direction": request_context.get("messageDirection"),
+ }
+ request_time_epoch_ms = int(request_context.get("requestTimeEpoch"))
+ if is_api_gateway_invocation_async(event):
+ InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async")
+ else:
+ InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="sync")
+ args = {
+ "service": service_name,
+ "resource": endpoint,
+ "span_type": "web",
+ }
+ tracer.set_tags({"_dd.origin": "lambda"})
+ upstream_authorizer_span = None
+ finish_time_ns = None
+ if decode_authorizer_context:
+ upstream_authorizer_span, finish_time_ns = process_injected_data(
+ event, request_time_epoch_ms, args, tags
+ )
+ span = tracer.trace("aws.apigateway.websocket", **args)
+ if span:
+ span.set_tags(tags)
+ span.start_ns = int(
+ finish_time_ns
+ if finish_time_ns is not None
+ else request_time_epoch_ms * 1e6
+ )
+ if upstream_authorizer_span:
+ span.parent_id = upstream_authorizer_span.span_id
+ return span
+
+
+def create_inferred_span_from_api_gateway_event(
+ event, context, decode_authorizer_context: bool = True
+):
+ request_context = event.get("requestContext")
+ domain = request_context.get("domainName", "")
+ api_id = request_context.get("apiId")
+ service_name = determine_service_name(
+ service_mapping, api_id, "lambda_api_gateway", domain
+ )
+ method = event.get("httpMethod")
+ path = event.get("path")
+ resource = "{0} {1}".format(method, path)
+ tags = {
+ "operation_name": "aws.apigateway.rest",
+ "http.url": domain + path,
+ "endpoint": path,
+ "http.method": method,
+ "resource_names": resource,
+ "apiid": api_id,
+ "apiname": api_id,
+ "stage": request_context.get("stage"),
+ "request_id": context.aws_request_id,
+ }
+ request_time_epoch_ms = int(request_context.get("requestTimeEpoch"))
+ if is_api_gateway_invocation_async(event):
+ InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async")
+ else:
+ InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="sync")
+ args = {
+ "service": service_name,
+ "resource": resource,
+ "span_type": "http",
+ }
+ tracer.set_tags({"_dd.origin": "lambda"})
+ upstream_authorizer_span = None
+ finish_time_ns = None
+ if decode_authorizer_context:
+ upstream_authorizer_span, finish_time_ns = process_injected_data(
+ event, request_time_epoch_ms, args, tags
+ )
+ span = tracer.trace("aws.apigateway", **args)
+ if span:
+ span.set_tags(tags)
+ # start time pushed by the inserted authorizer span
+ span.start_ns = int(
+ finish_time_ns
+ if finish_time_ns is not None
+ else request_time_epoch_ms * 1e6
+ )
+ if upstream_authorizer_span:
+ span.parent_id = upstream_authorizer_span.span_id
+ return span
+
+
+def create_inferred_span_from_http_api_event(
+ event, context, decode_authorizer_context: bool = True
+):
+ request_context = event.get("requestContext")
+ domain = request_context.get("domainName")
+ api_id = request_context.get("apiId")
+ service_name = determine_service_name(
+ service_mapping, api_id, "lambda_api_gateway", domain
+ )
+ method = request_context.get("http", {}).get("method")
+ path = event.get("rawPath")
+ resource = "{0} {1}".format(method, path)
+ tags = {
+ "operation_name": "aws.httpapi",
+ "endpoint": path,
+ "http.url": domain + path,
+ "http.method": request_context.get("http", {}).get("method"),
+ "http.protocol": request_context.get("http", {}).get("protocol"),
+ "http.source_ip": request_context.get("http", {}).get("sourceIp"),
+ "http.user_agent": request_context.get("http", {}).get("userAgent"),
+ "resource_names": resource,
+ "request_id": context.aws_request_id,
+ "apiid": api_id,
+ "apiname": api_id,
+ "stage": request_context.get("stage"),
+ }
+ request_time_epoch_ms = int(request_context.get("timeEpoch"))
+ if is_api_gateway_invocation_async(event):
+ InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async")
+ else:
+ InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="sync")
+ args = {
+ "service": service_name,
+ "resource": resource,
+ "span_type": "http",
+ }
+ tracer.set_tags({"_dd.origin": "lambda"})
+ inferred_span_start_ns = request_time_epoch_ms * 1e6
+ if decode_authorizer_context:
+ injected_authorizer_data = get_injected_authorizer_data(event, True)
+ if injected_authorizer_data:
+ inferred_span_start_ns = injected_authorizer_data.get(
+ Headers.Parent_Span_Finish_Time
+ )
+ span = tracer.trace("aws.httpapi", **args)
+ if span:
+ span.set_tags(tags)
+ span.start_ns = int(inferred_span_start_ns)
+ return span
+
+
+def create_inferred_span_from_sqs_event(event, context):
+ trace_ctx = tracer.current_trace_context()
+
+ event_record = get_first_record(event)
+ event_source_arn = event_record.get("eventSourceARN")
+ queue_name = event_source_arn.split(":")[-1]
+ service_name = determine_service_name(
+ service_mapping, queue_name, "lambda_sqs", "sqs"
+ )
+ tags = {
+ "operation_name": "aws.sqs",
+ "resource_names": queue_name,
+ "queuename": queue_name,
+ "event_source_arn": event_source_arn,
+ "receipt_handle": event_record.get("receiptHandle"),
+ "sender_id": event_record.get("attributes", {}).get("SenderId"),
+ }
+ InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async")
+ request_time_epoch = event_record.get("attributes", {}).get("SentTimestamp")
+ args = {
+ "service": service_name,
+ "resource": queue_name,
+ "span_type": "web",
+ }
+ start_time = int(request_time_epoch) / 1000
+
+ upstream_span = None
+ if "body" in event_record:
+ body_str = event_record.get("body", {})
+ try:
+ body = json.loads(body_str)
+
+ # logic to deal with SNS => SQS event
+ if body.get("Type", "") == "Notification" and "TopicArn" in body:
+ logger.debug("Found SNS message inside SQS event")
+ upstream_span = create_inferred_span_from_sns_event(
+ create_sns_event(body), context
+ )
+ upstream_span.finish(finish_time=start_time)
+
+ # EventBridge => SQS
+ elif body.get("detail"):
+ detail = body.get("detail")
+ if detail.get("_datadog"):
+ logger.debug("Found an EventBridge message inside SQS event")
+ upstream_span = create_inferred_span_from_eventbridge_event(
+ body, context
+ )
+ upstream_span.finish(finish_time=start_time)
+
+ except Exception as e:
+ logger.debug(
+ "Unable to create upstream span from SQS message, with error %s" % e
+ )
+ pass
+
+ # trace context needs to be set again as it is reset
+ # when sns_span.finish executes
+ tracer.context_provider.activate(trace_ctx)
+ tracer.set_tags({"_dd.origin": "lambda"})
+ span = tracer.trace("aws.sqs", **args)
+ if span:
+ span.set_tags(tags)
+ span.start = start_time
+ if upstream_span:
+ span.parent_id = upstream_span.span_id
+
+ return span
+
+
+def create_inferred_span_from_sns_event(event, context):
+ event_record = get_first_record(event)
+ sns_message = event_record.get("Sns")
+ topic_arn = event_record.get("Sns", {}).get("TopicArn")
+ topic_name = topic_arn.split(":")[-1]
+ service_name = determine_service_name(
+ service_mapping, topic_name, "lambda_sns", "sns"
+ )
+ tags = {
+ "operation_name": "aws.sns",
+ "resource_names": topic_name,
+ "topicname": topic_name,
+ "topic_arn": topic_arn,
+ "message_id": sns_message.get("MessageId"),
+ "type": sns_message.get("Type"),
+ }
+
+ # Subject not available in SNS => SQS scenario
+ if "Subject" in sns_message and sns_message["Subject"]:
+ tags["subject"] = sns_message.get("Subject")
+
+ InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async")
+ sns_dt_format = "%Y-%m-%dT%H:%M:%S.%fZ"
+ timestamp = event_record.get("Sns", {}).get("Timestamp")
+ dt = datetime.strptime(timestamp, sns_dt_format)
+
+ args = {
+ "service": service_name,
+ "resource": topic_name,
+ "span_type": "web",
+ }
+ tracer.set_tags({"_dd.origin": "lambda"})
+ span = tracer.trace("aws.sns", **args)
+ if span:
+ span.set_tags(tags)
+ span.start = dt.replace(tzinfo=timezone.utc).timestamp()
+ return span
+
+
+def create_inferred_span_from_kinesis_event(event, context):
+ event_record = get_first_record(event)
+ event_source_arn = event_record.get("eventSourceARN")
+ event_id = event_record.get("eventID")
+ stream_name = event_source_arn.split(":")[-1]
+ shard_id = event_id.split(":")[0]
+ service_name = determine_service_name(
+ service_mapping, stream_name, "lambda_kinesis", "kinesis"
+ )
+ tags = {
+ "operation_name": "aws.kinesis",
+ "resource_names": stream_name,
+ "streamname": stream_name,
+ "shardid": shard_id,
+ "event_source_arn": event_source_arn,
+ "event_id": event_id,
+ "event_name": event_record.get("eventName"),
+ "event_version": event_record.get("eventVersion"),
+ "partition_key": event_record.get("kinesis", {}).get("partitionKey"),
+ }
+ InferredSpanInfo.set_tags(tags, tag_source="self", synchronicity="async")
+ request_time_epoch = event_record.get("kinesis", {}).get(
+ "approximateArrivalTimestamp"
+ )
+
+ args = {
+ "service": service_name,
+ "resource": stream_name,
+ "span_type": "web",
+ }
+ tracer.set_tags({"_dd.origin": "lambda"})
+ span = tracer.trace("aws.kinesis", **args)
+ if span:
+ span.set_tags(tags)
+ span.start = request_time_epoch
+ return span
+
+
+def create_inferred_span_from_dynamodb_event(event, context):
+ event_record = get_first_record(event)
+ event_source_arn = event_record.get("eventSourceARN")
+ table_name = event_source_arn.split("/")[1]
+ service_name = determine_service_name(
+ service_mapping, table_name, "lambda_dynamodb", "dynamodb"
+ )
+ dynamodb_message = event_record.get("dynamodb")
+ tags = {
+ "operation_name": "aws.dynamodb",
+ "resource_names": table_name,
+ "tablename": table_name,
+ "event_source_arn": event_source_arn,
+ "event_id": event_record.get("eventID"),
+ "event_name": event_record.get("eventName"),
+ "event_version": event_record.get("eventVersion"),
+ "stream_view_type": dynamodb_message.get("StreamViewType"),
+ "size_bytes": str(dynamodb_message.get("SizeBytes")),
+ }
+ InferredSpanInfo.set_tags(tags, synchronicity="async", tag_source="self")
+ request_time_epoch = event_record.get("dynamodb", {}).get(
+ "ApproximateCreationDateTime"
+ )
+ args = {
+ "service": service_name,
+ "resource": table_name,
+ "span_type": "web",
+ }
+ tracer.set_tags({"_dd.origin": "lambda"})
+ span = tracer.trace("aws.dynamodb", **args)
+ if span:
+ span.set_tags(tags)
+
+ span.start = int(request_time_epoch)
+ return span
+
+
+def create_inferred_span_from_s3_event(event, context):
+ event_record = get_first_record(event)
+ bucket_name = event_record.get("s3", {}).get("bucket", {}).get("name")
+ service_name = determine_service_name(
+ service_mapping, bucket_name, "lambda_s3", "s3"
+ )
+ tags = {
+ "operation_name": "aws.s3",
+ "resource_names": bucket_name,
+ "event_name": event_record.get("eventName"),
+ "bucketname": bucket_name,
+ "bucket_arn": event_record.get("s3", {}).get("bucket", {}).get("arn"),
+ "object_key": event_record.get("s3", {}).get("object", {}).get("key"),
+ "object_size": str(event_record.get("s3", {}).get("object", {}).get("size")),
+ "object_etag": event_record.get("s3", {}).get("object", {}).get("eTag"),
+ }
+ InferredSpanInfo.set_tags(tags, synchronicity="async", tag_source="self")
+ dt_format = "%Y-%m-%dT%H:%M:%S.%fZ"
+ timestamp = event_record.get("eventTime")
+ dt = datetime.strptime(timestamp, dt_format)
+
+ args = {
+ "service": service_name,
+ "resource": bucket_name,
+ "span_type": "web",
+ }
+ tracer.set_tags({"_dd.origin": "lambda"})
+ span = tracer.trace("aws.s3", **args)
+ if span:
+ span.set_tags(tags)
+ span.start = dt.replace(tzinfo=timezone.utc).timestamp()
+ return span
+
+
+def create_inferred_span_from_eventbridge_event(event, context):
+ source = event.get("source")
+ service_name = determine_service_name(
+ service_mapping, source, "lambda_eventbridge", "eventbridge"
+ )
+ tags = {
+ "operation_name": "aws.eventbridge",
+ "resource_names": source,
+ "detail_type": event.get("detail-type"),
+ }
+ InferredSpanInfo.set_tags(
+ tags,
+ synchronicity="async",
+ tag_source="self",
+ )
+ dt_format = "%Y-%m-%dT%H:%M:%SZ"
+ timestamp = event.get("time")
+ dt = datetime.strptime(timestamp, dt_format)
+
+ args = {
+ "service": service_name,
+ "resource": source,
+ "span_type": "web",
+ }
+ tracer.set_tags({"_dd.origin": "lambda"})
+ span = tracer.trace("aws.eventbridge", **args)
+ if span:
+ span.set_tags(tags)
+ span.start = dt.replace(tzinfo=timezone.utc).timestamp()
+ return span
+
+
+def create_function_execution_span(
+ context,
+ function_name,
+ is_cold_start,
+ is_proactive_init,
+ trace_context_source,
+ merge_xray_traces,
+ trigger_tags,
+ parent_span=None,
+):
+ tags = {}
+ if context:
+ function_arn = (context.invoked_function_arn or "").lower()
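+        # A fully qualified Lambda ARN has 8 colon-separated parts
+        # (arn:aws:lambda:region:account:function:name:version); strip any
+        # trailing version/alias so function_arn stays unqualified.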
+ tk = function_arn.split(":")
+ function_arn = ":".join(tk[0:7]) if len(tk) > 7 else function_arn
+ function_version = tk[7] if len(tk) > 7 else "$LATEST"
+ tags = {
+ "cold_start": str(is_cold_start).lower(),
+ "function_arn": function_arn,
+ "function_version": function_version,
+ "request_id": context.aws_request_id,
+ "resource_names": context.function_name,
+ "functionname": context.function_name.lower()
+ if context.function_name
+ else None,
+ "datadog_lambda": datadog_lambda_version,
+ "dd_trace": ddtrace_version,
+ "span.name": "aws.lambda",
+ }
+ if is_proactive_init:
+ tags["proactive_initialization"] = str(is_proactive_init).lower()
+ if trace_context_source == TraceContextSource.XRAY and merge_xray_traces:
+ tags["_dd.parent_source"] = trace_context_source
+ tags.update(trigger_tags)
+ args = {
+ "service": "aws.lambda",
+ "resource": function_name,
+ "span_type": "serverless",
+ }
+ tracer.set_tags({"_dd.origin": "lambda"})
+ span = tracer.trace("aws.lambda", **args)
+ if span:
+ span.set_tags(tags)
+ if parent_span:
+ span.parent_id = parent_span.span_id
+ return span
+
+
+def mark_trace_as_error_for_5xx_responses(context, status_code, span):
+ if len(status_code) == 3 and status_code.startswith("5"):
+ submit_errors_metric(context)
+ if span:
+ span.error = 1
+
+
+class InferredSpanInfo(object):
+ BASE_NAME = "_inferred_span"
+ SYNCHRONICITY = f"{BASE_NAME}.synchronicity"
+ TAG_SOURCE = f"{BASE_NAME}.tag_source"
+
+ @staticmethod
+ def set_tags(
+ tags: Dict[str, str],
+ synchronicity: Optional[Literal["sync", "async"]] = None,
+        tag_source: Optional[Literal["lambda", "self"]] = None,
+ ):
+ if synchronicity is not None:
+ tags[InferredSpanInfo.SYNCHRONICITY] = str(synchronicity)
+ if tag_source is not None:
+ tags[InferredSpanInfo.TAG_SOURCE] = str(tag_source)
+
+ @staticmethod
+ def set_tags_to_span(
+ span: Span,
+ synchronicity: Optional[Literal["sync", "async"]] = None,
+        tag_source: Optional[Literal["lambda", "self"]] = None,
+ ):
+ if synchronicity is not None:
+ span.set_tags({InferredSpanInfo.SYNCHRONICITY: synchronicity})
+ if tag_source is not None:
+ span.set_tags({InferredSpanInfo.TAG_SOURCE: str(tag_source)})
+
+ @staticmethod
+ def is_async(span: Span) -> bool:
+ if not span:
+ return False
+ try:
+ return span.get_tag(InferredSpanInfo.SYNCHRONICITY) == "async"
+ except Exception as e:
+ logger.debug(
+                "Unable to read the %s tag, returning False. Reason: %s.",
+ InferredSpanInfo.SYNCHRONICITY,
+ e,
+ )
+ return False
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/trigger.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/trigger.py
new file mode 100644
index 0000000..bbb44b3
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/trigger.py
@@ -0,0 +1,352 @@
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License Version 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2019 Datadog, Inc.
+
+import base64
+import gzip
+import json
+from io import BytesIO, BufferedReader
+from enum import Enum
+from typing import Any
+
+
+class _stringTypedEnum(Enum):
+ """
+ _stringTypedEnum provides a type-hinted convenience function for getting the string value of
+ an enum.
+ """
+
+ def get_string(self) -> str:
+ return self.value
+
+
+class EventTypes(_stringTypedEnum):
+ """
+ EventTypes is an enum of Lambda event types we care about.
+ """
+
+ UNKNOWN = "unknown"
+ API_GATEWAY = "api-gateway"
+ APPSYNC = "appsync"
+ ALB = "application-load-balancer"
+ CLOUDWATCH_LOGS = "cloudwatch-logs"
+ CLOUDWATCH_EVENTS = "cloudwatch-events"
+ CLOUDFRONT = "cloudfront"
+ DYNAMODB = "dynamodb"
+ EVENTBRIDGE = "eventbridge"
+ KINESIS = "kinesis"
+ LAMBDA_FUNCTION_URL = "lambda-function-url"
+ S3 = "s3"
+ SNS = "sns"
+ SQS = "sqs"
+ STEPFUNCTIONS = "states"
+
+
+class EventSubtypes(_stringTypedEnum):
+ """
+ EventSubtypes is an enum of Lambda event subtypes.
+    Currently, only API Gateway event subtypes are supported,
+    e.g. HTTP API and WebSocket events vs. vanilla API Gateway events.
+ """
+
+ NONE = "none"
+ API_GATEWAY = "api-gateway" # regular API Gateway
+ WEBSOCKET = "websocket"
+ HTTP_API = "http-api"
+
+
+class _EventSource:
+ """
+ _EventSource holds an event's type and subtype.
+ """
+
+ def __init__(
+ self,
+ event_type: EventTypes,
+ subtype: EventSubtypes = EventSubtypes.NONE,
+ ):
+ self.event_type = event_type
+ self.subtype = subtype
+
+ def to_string(self) -> str:
+ """
+ to_string returns the string representation of an _EventSource.
+ Since to_string was added to support trigger tagging,
+ the event's subtype will never be included in the string.
+ """
+ return self.event_type.get_string()
+
+ def equals(
+ self, event_type: EventTypes, subtype: EventSubtypes = EventSubtypes.NONE
+ ) -> bool:
+ """
+ equals provides syntactic sugar to determine whether this _EventSource has a given type
+ and subtype.
+ Unknown events will never equal other events.
+ """
+ if self.event_type == EventTypes.UNKNOWN:
+ return False
+ if self.event_type != event_type:
+ return False
+ if self.subtype != subtype:
+ return False
+ return True
+
+
+def get_aws_partition_by_region(region):
+ if region.startswith("us-gov-"):
+ return "aws-us-gov"
+ if region.startswith("cn-"):
+ return "aws-cn"
+ return "aws"
+
+
+def get_first_record(event):
+ records = event.get("Records")
+ if records and len(records) > 0:
+ return records[0]
+
+
+def parse_event_source(event: dict) -> _EventSource:
+ """Determines the source of the trigger event"""
+ if type(event) is not dict:
+ return _EventSource(EventTypes.UNKNOWN)
+
+ event_source = _EventSource(EventTypes.UNKNOWN)
+
+ request_context = event.get("requestContext")
+ if request_context and request_context.get("stage"):
+ if "domainName" in request_context and detect_lambda_function_url_domain(
+ request_context.get("domainName")
+ ):
+ return _EventSource(EventTypes.LAMBDA_FUNCTION_URL)
+ event_source = _EventSource(EventTypes.API_GATEWAY)
+ if "httpMethod" in event:
+ event_source.subtype = EventSubtypes.API_GATEWAY
+ if "routeKey" in event:
+ event_source.subtype = EventSubtypes.HTTP_API
+ if event.get("requestContext", {}).get("messageDirection"):
+ event_source.subtype = EventSubtypes.WEBSOCKET
+
+ if request_context and request_context.get("elb"):
+ event_source = _EventSource(EventTypes.ALB)
+
+ if event.get("awslogs"):
+ event_source = _EventSource(EventTypes.CLOUDWATCH_LOGS)
+
+ if event.get("detail-type"):
+ event_source = _EventSource(EventTypes.EVENTBRIDGE)
+
+ event_detail = event.get("detail")
+ has_event_categories = (
+ isinstance(event_detail, dict)
+ and event_detail.get("EventCategories") is not None
+ )
+ if event.get("source") == "aws.events" or has_event_categories:
+ event_source = _EventSource(EventTypes.CLOUDWATCH_EVENTS)
+
+ if "Execution" in event and "StateMachine" in event and "State" in event:
+ event_source = _EventSource(EventTypes.STEPFUNCTIONS)
+
+ event_record = get_first_record(event)
+ if event_record:
+ aws_event_source = event_record.get(
+ "eventSource", event_record.get("EventSource")
+ )
+
+ if aws_event_source == "aws:dynamodb":
+ event_source = _EventSource(EventTypes.DYNAMODB)
+ if aws_event_source == "aws:kinesis":
+ event_source = _EventSource(EventTypes.KINESIS)
+ if aws_event_source == "aws:s3":
+ event_source = _EventSource(EventTypes.S3)
+ if aws_event_source == "aws:sns":
+ event_source = _EventSource(EventTypes.SNS)
+ if aws_event_source == "aws:sqs":
+ event_source = _EventSource(EventTypes.SQS)
+
+ if event_record.get("cf"):
+ event_source = _EventSource(EventTypes.CLOUDFRONT)
+
+ return event_source
+
+
+def detect_lambda_function_url_domain(domain: str) -> bool:
+ # e.g. "etsn5fibjr.lambda-url.eu-south-1.amazonaws.com"
+ domain_parts = domain.split(".")
+ if len(domain_parts) < 2:
+ return False
+ return domain_parts[1] == "lambda-url"
+
+
+def parse_event_source_arn(source: _EventSource, event: dict, context: Any) -> str:
+ """
+ Parses the trigger event for an available ARN. If an ARN field is not provided
+    in the event, we stitch it together.
+ """
+ split_function_arn = context.invoked_function_arn.split(":")
+ region = split_function_arn[3]
+ account_id = split_function_arn[4]
+ aws_arn = get_aws_partition_by_region(region)
+
+ event_record = get_first_record(event)
+ # e.g. arn:aws:s3:::lambda-xyz123-abc890
+ if source.to_string() == "s3":
+ return event_record.get("s3", {}).get("bucket", {}).get("arn")
+
+ # e.g. arn:aws:sns:us-east-1:123456789012:sns-lambda
+ if source.to_string() == "sns":
+ return event_record.get("Sns", {}).get("TopicArn")
+
+ # e.g. arn:aws:cloudfront::123456789012:distribution/ABC123XYZ
+ if source.event_type == EventTypes.CLOUDFRONT:
+ distribution_id = (
+ event_record.get("cf", {}).get("config", {}).get("distributionId")
+ )
+ return "arn:{}:cloudfront::{}:distribution/{}".format(
+ aws_arn, account_id, distribution_id
+ )
+
+ # e.g. arn:aws:lambda:::url::
+ if source.equals(EventTypes.LAMBDA_FUNCTION_URL):
+ function_name = ""
+ if len(split_function_arn) >= 7:
+ function_name = split_function_arn[6]
+ function_arn = f"arn:aws:lambda:{region}:{account_id}:url:{function_name}"
+ function_qualifier = ""
+ if len(split_function_arn) >= 8:
+ function_qualifier = split_function_arn[7]
+ function_arn = function_arn + f":{function_qualifier}"
+ return function_arn
+
+ # e.g. arn:aws:apigateway:us-east-1::/restapis/xyz123/stages/default
+ if source.event_type == EventTypes.API_GATEWAY:
+ request_context = event.get("requestContext")
+ return "arn:{}:apigateway:{}::/restapis/{}/stages/{}".format(
+ aws_arn, region, request_context.get("apiId"), request_context.get("stage")
+ )
+
+ # e.g. arn:aws:elasticloadbalancing:us-east-1:123456789012:targetgroup/lambda-xyz/123
+ if source.event_type == EventTypes.ALB:
+ request_context = event.get("requestContext")
+ return request_context.get("elb", {}).get("targetGroupArn")
+
+ # e.g. arn:aws:logs:us-west-1:123456789012:log-group:/my-log-group-xyz
+ if source.event_type == EventTypes.CLOUDWATCH_LOGS:
+ with gzip.GzipFile(
+ fileobj=BytesIO(base64.b64decode(event.get("awslogs", {}).get("data")))
+ ) as decompress_stream:
+ data = b"".join(BufferedReader(decompress_stream))
+ logs = json.loads(data)
+ log_group = logs.get("logGroup", "cloudwatch")
+ return "arn:{}:logs:{}:{}:log-group:{}".format(
+ aws_arn, region, account_id, log_group
+ )
+
+ # e.g. arn:aws:events:us-east-1:123456789012:rule/my-schedule
+ if source.event_type == EventTypes.CLOUDWATCH_EVENTS and event.get("resources"):
+ return event.get("resources")[0]
+
+
+def get_event_source_arn(source: _EventSource, event: dict, context: Any) -> str:
+ event_source_arn = event.get("eventSourceARN") or event.get("eventSourceArn")
+
+ event_record = get_first_record(event)
+ if event_record:
+ event_source_arn = event_record.get("eventSourceARN") or event_record.get(
+ "eventSourceArn"
+ )
+
+ if event_source_arn is None:
+ event_source_arn = parse_event_source_arn(source, event, context)
+
+ return event_source_arn
+
+
+def extract_http_tags(event):
+ """
+ Extracts HTTP facet tags from the triggering event
+ """
+ http_tags = {}
+ request_context = event.get("requestContext")
+ path = event.get("path")
+ method = event.get("httpMethod")
+ if request_context and request_context.get("stage"):
+ if request_context.get("domainName"):
+ http_tags["http.url"] = request_context.get("domainName")
+
+ path = request_context.get("path")
+ method = request_context.get("httpMethod")
+ # Version 2.0 HTTP API Gateway
+ apigateway_v2_http = request_context.get("http")
+ if event.get("version") == "2.0" and apigateway_v2_http:
+ path = apigateway_v2_http.get("path")
+ method = apigateway_v2_http.get("method")
+
+ if path:
+ http_tags["http.url_details.path"] = path
+ if method:
+ http_tags["http.method"] = method
+
+ headers = event.get("headers")
+ if headers and headers.get("Referer"):
+ http_tags["http.referer"] = headers.get("Referer")
+
+ return http_tags
+
+
+def extract_trigger_tags(event: dict, context: Any) -> dict:
+ """
+ Parses the trigger event object to get tags to be added to the span metadata
+ """
+ trigger_tags = {}
+ event_source = parse_event_source(event)
+ if event_source.to_string() is not None and event_source.to_string() != "unknown":
+ trigger_tags["function_trigger.event_source"] = event_source.to_string()
+
+ event_source_arn = get_event_source_arn(event_source, event, context)
+ if event_source_arn:
+ trigger_tags["function_trigger.event_source_arn"] = event_source_arn
+
+ if event_source.event_type in [
+ EventTypes.API_GATEWAY,
+ EventTypes.ALB,
+ EventTypes.LAMBDA_FUNCTION_URL,
+ ]:
+ trigger_tags.update(extract_http_tags(event))
+
+ return trigger_tags
+
+
+def extract_http_status_code_tag(trigger_tags, response):
+ """
+ If the Lambda was triggered by API Gateway, Lambda Function URL, or ALB,
+ add the returned status code as a tag to the function execution span.
+ """
+ if trigger_tags is None:
+ return
+ str_event_source = trigger_tags.get("function_trigger.event_source")
+    # It would be cleaner if each event type were a constant object that
+    # knew these properties about itself.
+ str_http_triggers = [
+ et.value
+ for et in [
+ EventTypes.API_GATEWAY,
+ EventTypes.LAMBDA_FUNCTION_URL,
+ EventTypes.ALB,
+ ]
+ ]
+ if str_event_source not in str_http_triggers:
+ return
+
+ status_code = "200"
+ if response is None:
+ # Return a 502 status if no response is found
+ status_code = "502"
+ elif hasattr(response, "get"):
+        status_code = response.get("statusCode", "200")
+ elif hasattr(response, "status_code"):
+ status_code = response.status_code
+
+ return str(status_code)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/wrapper.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/wrapper.py
new file mode 100644
index 0000000..73d1788
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/wrapper.py
@@ -0,0 +1,395 @@
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License Version 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2019 Datadog, Inc.
+import base64
+import os
+import logging
+import traceback
+from importlib import import_module
+import json
+from time import time_ns
+
+from datadog_lambda.extension import should_use_extension, flush_extension
+from datadog_lambda.cold_start import (
+ set_cold_start,
+ is_cold_start,
+ is_proactive_init,
+ is_new_sandbox,
+ ColdStartTracer,
+)
+from datadog_lambda.constants import (
+ TraceContextSource,
+ XraySubsegment,
+ Headers,
+ TraceHeader,
+)
+from datadog_lambda.metric import (
+ flush_stats,
+ submit_invocations_metric,
+ submit_errors_metric,
+)
+from datadog_lambda.module_name import modify_module_name
+from datadog_lambda.patch import patch_all
+from datadog_lambda.tracing import (
+ extract_dd_trace_context,
+ create_dd_dummy_metadata_subsegment,
+ inject_correlation_ids,
+ dd_tracing_enabled,
+ mark_trace_as_error_for_5xx_responses,
+ set_correlation_ids,
+ set_dd_trace_py_root,
+ create_function_execution_span,
+ create_inferred_span,
+ InferredSpanInfo,
+ is_authorizer_response,
+ tracer,
+)
+from datadog_lambda.trigger import (
+ extract_trigger_tags,
+ extract_http_status_code_tag,
+)
+
+profiling_env_var = os.environ.get("DD_PROFILING_ENABLED", "false").lower() == "true"
+if profiling_env_var:
+ from ddtrace.profiling import profiler
+
+logger = logging.getLogger(__name__)
+
+DD_FLUSH_TO_LOG = "DD_FLUSH_TO_LOG"
+DD_LOGS_INJECTION = "DD_LOGS_INJECTION"
+DD_MERGE_XRAY_TRACES = "DD_MERGE_XRAY_TRACES"
+AWS_LAMBDA_FUNCTION_NAME = "AWS_LAMBDA_FUNCTION_NAME"
+DD_LOCAL_TEST = "DD_LOCAL_TEST"
+DD_TRACE_EXTRACTOR = "DD_TRACE_EXTRACTOR"
+DD_TRACE_MANAGED_SERVICES = "DD_TRACE_MANAGED_SERVICES"
+DD_ENCODE_AUTHORIZER_CONTEXT = "DD_ENCODE_AUTHORIZER_CONTEXT"
+DD_DECODE_AUTHORIZER_CONTEXT = "DD_DECODE_AUTHORIZER_CONTEXT"
+DD_COLD_START_TRACING = "DD_COLD_START_TRACING"
+DD_MIN_COLD_START_DURATION = "DD_MIN_COLD_START_DURATION"
+DD_COLD_START_TRACE_SKIP_LIB = "DD_COLD_START_TRACE_SKIP_LIB"
+DD_CAPTURE_LAMBDA_PAYLOAD = "DD_CAPTURE_LAMBDA_PAYLOAD"
+DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH = "DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH"
+DD_REQUESTS_SERVICE_NAME = "DD_REQUESTS_SERVICE_NAME"
+DD_SERVICE = "DD_SERVICE"
+DD_ENV = "DD_ENV"
+
+
+def get_env_as_int(env_key, default_value: int) -> int:
+ try:
+ return int(os.environ.get(env_key, default_value))
+ except Exception as e:
+        logger.warning(
+ f"Failed to parse {env_key} as int. Using default value: {default_value}. Error: {e}"
+ )
+ return default_value
+
+
+dd_capture_lambda_payload_enabled = (
+ os.environ.get(DD_CAPTURE_LAMBDA_PAYLOAD, "false").lower() == "true"
+)
+
+if dd_capture_lambda_payload_enabled:
+ import datadog_lambda.tag_object as tag_object
+
+ tag_object.max_depth = get_env_as_int(
+ DD_CAPTURE_LAMBDA_PAYLOAD_MAX_DEPTH, tag_object.max_depth
+ )
+
+env_env_var = os.environ.get(DD_ENV, None)
+
+init_timestamp_ns = time_ns()
+
+"""
+Usage:
+
+import requests
+from datadog_lambda.wrapper import datadog_lambda_wrapper
+from datadog_lambda.metric import lambda_metric
+
+@datadog_lambda_wrapper
+def my_lambda_handler(event, context):
+ lambda_metric("my_metric", 10)
+ requests.get("https://www.datadoghq.com")
+"""
+
+
+class _NoopDecorator(object):
+ def __init__(self, func):
+ self.func = func
+
+ def __call__(self, *args, **kwargs):
+ return self.func(*args, **kwargs)
+
+
+class _LambdaDecorator(object):
+ """
+    Decorator to automatically initialize the Datadog API client, flush metrics,
+    and extract/inject trace context.
+ """
+
+ _force_wrap = False
+
+ def __new__(cls, func):
+ """
+ If the decorator is accidentally applied to the same function multiple times,
+ wrap only once.
+
+ If _force_wrap, always return a real decorator, useful for unit tests.
+ """
+ try:
+ if cls._force_wrap or not isinstance(func, _LambdaDecorator):
+ wrapped = super(_LambdaDecorator, cls).__new__(cls)
+ logger.debug("datadog_lambda_wrapper wrapped")
+ return wrapped
+ else:
+ logger.debug("datadog_lambda_wrapper already wrapped")
+ return _NoopDecorator(func)
+ except Exception as e:
+ logger.error(format_err_with_traceback(e))
+ return func
+
+ def __init__(self, func):
+ """Executes when the wrapped function gets wrapped"""
+ try:
+ self.func = func
+ self.flush_to_log = os.environ.get(DD_FLUSH_TO_LOG, "").lower() == "true"
+ self.logs_injection = (
+ os.environ.get(DD_LOGS_INJECTION, "true").lower() == "true"
+ )
+ self.merge_xray_traces = (
+ os.environ.get(DD_MERGE_XRAY_TRACES, "false").lower() == "true"
+ )
+ self.function_name = os.environ.get(AWS_LAMBDA_FUNCTION_NAME, "function")
+ self.service = os.environ.get(DD_SERVICE, None)
+ self.extractor_env = os.environ.get(DD_TRACE_EXTRACTOR, None)
+ self.trace_extractor = None
+ self.span = None
+ self.inferred_span = None
+ depends_on_dd_tracing_enabled = (
+ lambda original_boolean: dd_tracing_enabled and original_boolean
+ )
+ self.make_inferred_span = depends_on_dd_tracing_enabled(
+ os.environ.get(DD_TRACE_MANAGED_SERVICES, "true").lower() == "true"
+ )
+ self.encode_authorizer_context = depends_on_dd_tracing_enabled(
+ os.environ.get(DD_ENCODE_AUTHORIZER_CONTEXT, "true").lower() == "true"
+ )
+ self.decode_authorizer_context = depends_on_dd_tracing_enabled(
+ os.environ.get(DD_DECODE_AUTHORIZER_CONTEXT, "true").lower() == "true"
+ )
+ self.cold_start_tracing = depends_on_dd_tracing_enabled(
+ os.environ.get(DD_COLD_START_TRACING, "true").lower() == "true"
+ )
+ self.min_cold_start_trace_duration = get_env_as_int(
+ DD_MIN_COLD_START_DURATION, 3
+ )
+ self.local_testing_mode = os.environ.get(
+ DD_LOCAL_TEST, "false"
+ ).lower() in ("true", "1")
+ self.cold_start_trace_skip_lib = [
+ "ddtrace.internal.compat",
+ "ddtrace.filters",
+ ]
+ if DD_COLD_START_TRACE_SKIP_LIB in os.environ:
+ try:
+ self.cold_start_trace_skip_lib = os.environ[
+ DD_COLD_START_TRACE_SKIP_LIB
+ ].split(",")
+ except Exception:
+                    logger.debug(f"Malformed value for env var {DD_COLD_START_TRACE_SKIP_LIB}")
+ self.response = None
+ if profiling_env_var:
+ self.prof = profiler.Profiler(env=env_env_var, service=self.service)
+ if self.extractor_env:
+ extractor_parts = self.extractor_env.rsplit(".", 1)
+ if len(extractor_parts) == 2:
+ (mod_name, extractor_name) = extractor_parts
+ modified_extractor_name = modify_module_name(mod_name)
+ extractor_module = import_module(modified_extractor_name)
+ self.trace_extractor = getattr(extractor_module, extractor_name)
+
+ # Inject trace correlation ids to logs
+ if self.logs_injection:
+ inject_correlation_ids()
+
+ # This prevents a breaking change in ddtrace v0.49 regarding the service name
+ # in requests-related spans
+ os.environ[DD_REQUESTS_SERVICE_NAME] = os.environ.get(
+ DD_SERVICE, "aws.lambda"
+ )
+ # Patch third-party libraries for tracing
+ patch_all()
+
+ logger.debug("datadog_lambda_wrapper initialized")
+ except Exception as e:
+ logger.error(format_err_with_traceback(e))
+
+ def __call__(self, event, context, **kwargs):
+ """Executes when the wrapped function gets called"""
+ self._before(event, context)
+ try:
+ self.response = self.func(event, context, **kwargs)
+ return self.response
+ except Exception:
+ submit_errors_metric(context)
+ if self.span:
+ self.span.set_traceback()
+ raise
+ finally:
+ self._after(event, context)
+
+ def _inject_authorizer_span_headers(self, request_id):
+ reference_span = self.inferred_span if self.inferred_span else self.span
+ assert reference_span.finished
+        # finish_time_ns should be set to the end of the inferred span if it
+        # exists, or to the end of the current span otherwise
+ finish_time_ns = (
+ reference_span.start_ns + reference_span.duration_ns
+ if reference_span is not None
+ and hasattr(reference_span, "start_ns")
+ and hasattr(reference_span, "duration_ns")
+ else time_ns()
+ )
+ injected_headers = {}
+ source_span = self.inferred_span if self.inferred_span else self.span
+ span_context = source_span.context
+ injected_headers[TraceHeader.TRACE_ID] = str(span_context.trace_id)
+ injected_headers[TraceHeader.PARENT_ID] = str(span_context.span_id)
+ sampling_priority = span_context.sampling_priority
+ if sampling_priority is not None:
+ injected_headers[TraceHeader.SAMPLING_PRIORITY] = str(
+ span_context.sampling_priority
+ )
+ injected_headers[Headers.Parent_Span_Finish_Time] = finish_time_ns
+ if request_id is not None:
+ injected_headers[Headers.Authorizing_Request_Id] = request_id
+ datadog_data = base64.b64encode(json.dumps(injected_headers).encode()).decode()
+ self.response.setdefault("context", {})
+ self.response["context"]["_datadog"] = datadog_data
+
+ def _before(self, event, context):
+ try:
+ self.response = None
+ set_cold_start(init_timestamp_ns)
+ submit_invocations_metric(context)
+ self.trigger_tags = extract_trigger_tags(event, context)
+ # Extract Datadog trace context and source from incoming requests
+ dd_context, trace_context_source, event_source = extract_dd_trace_context(
+ event,
+ context,
+ extractor=self.trace_extractor,
+ decode_authorizer_context=self.decode_authorizer_context,
+ )
+ self.event_source = event_source
+ # Create a Datadog X-Ray subsegment with the trace context
+ if dd_context and trace_context_source == TraceContextSource.EVENT:
+ create_dd_dummy_metadata_subsegment(
+ {
+ "trace-id": str(dd_context.trace_id),
+ "parent-id": str(dd_context.span_id),
+ "sampling-priority": str(dd_context.sampling_priority),
+ },
+ XraySubsegment.TRACE_KEY,
+ )
+
+ if dd_tracing_enabled:
+ set_dd_trace_py_root(trace_context_source, self.merge_xray_traces)
+ if self.make_inferred_span:
+ self.inferred_span = create_inferred_span(
+ event, context, event_source, self.decode_authorizer_context
+ )
+ self.span = create_function_execution_span(
+ context,
+ self.function_name,
+ is_cold_start(),
+ is_proactive_init(),
+ trace_context_source,
+ self.merge_xray_traces,
+ self.trigger_tags,
+ parent_span=self.inferred_span,
+ )
+ else:
+ set_correlation_ids()
+ if profiling_env_var and is_new_sandbox():
+ self.prof.start(stop_on_exit=False, profile_children=True)
+ logger.debug("datadog_lambda_wrapper _before() done")
+ except Exception as e:
+ logger.error(format_err_with_traceback(e))
+
+ def _after(self, event, context):
+ try:
+ status_code = extract_http_status_code_tag(self.trigger_tags, self.response)
+ if status_code:
+ self.trigger_tags["http.status_code"] = status_code
+ mark_trace_as_error_for_5xx_responses(context, status_code, self.span)
+
+ # Create a new dummy Datadog subsegment for function trigger tags so we
+ # can attach them to X-Ray spans when hybrid tracing is used
+ if self.trigger_tags:
+ create_dd_dummy_metadata_subsegment(
+ self.trigger_tags, XraySubsegment.LAMBDA_FUNCTION_TAGS_KEY
+ )
+ should_trace_cold_start = self.cold_start_tracing and is_new_sandbox()
+ if should_trace_cold_start:
+ trace_ctx = tracer.current_trace_context()
+
+ if self.span:
+ if dd_capture_lambda_payload_enabled:
+ tag_object.tag_object(self.span, "function.request", event)
+ tag_object.tag_object(self.span, "function.response", self.response)
+
+ if status_code:
+ self.span.set_tag("http.status_code", status_code)
+ self.span.finish()
+
+ if self.inferred_span:
+ if status_code:
+ self.inferred_span.set_tag("http.status_code", status_code)
+
+ if self.service:
+ self.inferred_span.set_tag("peer.service", self.service)
+
+ if InferredSpanInfo.is_async(self.inferred_span) and self.span:
+ self.inferred_span.finish(finish_time=self.span.start)
+ else:
+ self.inferred_span.finish()
+
+ if should_trace_cold_start:
+ try:
+ following_span = self.span or self.inferred_span
+ ColdStartTracer(
+ tracer,
+ self.function_name,
+ following_span.start_ns,
+ trace_ctx,
+ self.min_cold_start_trace_duration,
+ self.cold_start_trace_skip_lib,
+ ).trace()
+ except Exception as e:
+ logger.debug("Failed to create cold start spans. %s", e)
+
+ if not self.flush_to_log or should_use_extension:
+ flush_stats()
+ if should_use_extension and self.local_testing_mode:
+ # when testing locally, the extension does not know when an
+ # invocation completes because it does not have access to the
+ # logs api
+ flush_extension()
+
+ if self.encode_authorizer_context and is_authorizer_response(self.response):
+ self._inject_authorizer_span_headers(
+ event.get("requestContext", {}).get("requestId")
+ )
+ logger.debug("datadog_lambda_wrapper _after() done")
+ except Exception as e:
+ logger.error(format_err_with_traceback(e))
+
+
+def format_err_with_traceback(e):
+ return "Error {}. Traceback: {}".format(
+ e, traceback.format_exc().replace("\n", "\r")
+ )
+
+
+datadog_lambda_wrapper = _LambdaDecorator
diff --git a/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/xray.py b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/xray.py
new file mode 100644
index 0000000..88d108f
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/datadog_lambda/xray.py
@@ -0,0 +1,118 @@
+import os
+import logging
+import json
+import binascii
+import time
+import socket
+
+from datadog_lambda.constants import XrayDaemon, XraySubsegment, TraceContextSource
+
+logger = logging.getLogger(__name__)
+
+
+def get_xray_host_port(address):
+ if address == "":
+ logger.debug("X-Ray daemon env var not set, not sending sub-segment")
+ return None
+ parts = address.split(":")
+ if len(parts) <= 1:
+        logger.debug("Invalid X-Ray daemon address, not sending sub-segment")
+ return None
+ port = int(parts[1])
+ host = parts[0]
+ return (host, port)
+
+
+def send(host_port_tuple, payload):
+ sock = None
+ try:
+ sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ sock.setblocking(0)
+ sock.connect(host_port_tuple)
+ sock.send(payload.encode("utf-8"))
+ except Exception as e_send:
+ logger.error("Error occurred submitting to xray daemon: %s", str(e_send))
+    if sock is not None:
+        try:
+            sock.close()
+        except Exception as e_close:
+            logger.error("Error while closing the socket: %s", str(e_close))
+
+
+def build_segment_payload(payload):
+ if payload is None:
+ return None
+ return '{"format": "json", "version": 1}' + "\n" + payload
+
+
+def parse_xray_header(raw_trace_id):
+ # Example:
+ # Root=1-5e272390-8c398be037738dc042009320;Parent=94ae789b969f1cc5;Sampled=1;Lineage=c6c5b1b9:0
+ logger.debug("Reading trace context from env var %s", raw_trace_id)
+ if len(raw_trace_id) == 0:
+ return None
+ parts = raw_trace_id.split(";")
+ if len(parts) < 3:
+ return None
+ root = parts[0].replace("Root=", "")
+ parent = parts[1].replace("Parent=", "")
+ sampled = parts[2].replace("Sampled=", "")
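+    # If any replace() above was a no-op, the expected "Root="/"Parent="/
+    # "Sampled=" prefix was missing and the header is malformed.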
+ if (
+ len(root) == len(parts[0])
+ or len(parent) == len(parts[1])
+ or len(sampled) == len(parts[2])
+ ):
+ return None
+ return {
+ "parent_id": parent,
+ "trace_id": root,
+ "sampled": sampled,
+ "source": TraceContextSource.XRAY,
+ }
+
+
+def generate_random_id():
+ return binascii.b2a_hex(os.urandom(8)).decode("utf-8")
+
+
+def build_segment(context, key, metadata):
+ segment = json.dumps(
+ {
+ "id": generate_random_id(),
+ "trace_id": context["trace_id"],
+ "parent_id": context["parent_id"],
+ "name": XraySubsegment.NAME,
+ "start_time": time.time(),
+ "end_time": time.time(),
+ "type": "subsegment",
+ "metadata": {
+ XraySubsegment.NAMESPACE: {
+ key: metadata,
+ }
+ },
+ }
+ )
+ return segment
+
+
+def send_segment(key, metadata):
+ host_port_tuple = get_xray_host_port(
+ os.environ.get(XrayDaemon.XRAY_DAEMON_ADDRESS, "")
+ )
+ if host_port_tuple is None:
+ return None
+ context = parse_xray_header(
+ os.environ.get(XrayDaemon.XRAY_TRACE_ID_HEADER_NAME, "")
+ )
+ if context is None:
+ logger.debug(
+ "Failed to create segment since it was not possible to get trace context from header"
+ )
+ return None
+
+ # Skip adding segment, if the xray trace is going to be sampled away.
+ if context["sampled"] == "0":
+ logger.debug("Skipping sending metadata, x-ray trace was sampled out")
+ return None
+ segment = build_segment(context, key, metadata)
+ segment_payload = build_segment_payload(segment)
+ send(host_port_tuple, segment_payload)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/LICENSE b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/LICENSE
new file mode 100644
index 0000000..7e153db
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright 2020 DataDog, Inc.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/LICENSE-3rdparty.csv b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/LICENSE-3rdparty.csv
new file mode 100644
index 0000000..66c2263
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/LICENSE-3rdparty.csv
@@ -0,0 +1,3 @@
+Component,Origin,License,Copyright
+import,numpy,BSD-3-Clause,Copyright (c) 2005-2020 NumPy Developers.; All rights reserved.
+import,setuptools,MIT,Copyright (c) 2016 Jason R Coombs
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/METADATA
new file mode 100644
index 0000000..3716a35
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/METADATA
@@ -0,0 +1,166 @@
+Metadata-Version: 2.1
+Name: ddsketch
+Version: 2.0.4
+Summary: Distributed quantile sketches
+Home-page: http://github.com/datadog/sketches-py
+Download-URL: https://github.com/DataDog/sketches-py/archive/v1.0.tar.gz
+Author: Jee Rim, Charles-Philippe Masson, Homin Lee
+Author-email: jee.rim@datadoghq.com, charles.masson@datadoghq.com, homin@datadoghq.com
+Keywords: ddsketch,quantile,sketch
+Classifier: Programming Language :: Python :: 3
+Classifier: License :: OSI Approved :: Apache Software License
+Requires-Python: >=2.7
+Description-Content-Type: text/markdown
+License-File: LICENSE
+License-File: LICENSE-3rdparty.csv
+License-File: NOTICE
+Requires-Dist: six
+Requires-Dist: typing ; python_version < "3.5"
+Requires-Dist: protobuf (<4.21.0,>=3.0.0) ; python_version < "3.7"
+Requires-Dist: protobuf (>=3.0.0) ; python_version >= "3.7"
+
+# ddsketch
+
+This repo contains the Python implementation of the distributed quantile sketch
+algorithm DDSketch [1]. DDSketch has relative-error guarantees for any quantile
+q in [0, 1]. That is, if the true value of the qth quantile is `x`, then DDSketch
+returns a value `y` such that `|x-y| / x < e`, where `e` is the relative error
+parameter. (The default here is set to 0.01.) DDSketch is also fully mergeable,
+meaning that multiple sketches from distributed systems can be combined in a
+central node.
+
+Our default implementation, `DDSketch`, is guaranteed [1] to not grow too large
+in size for any data that can be described by a distribution whose tails are
+sub-exponential.
+
+We also provide implementations (`LogCollapsingLowestDenseDDSketch` and
+`LogCollapsingHighestDenseDDSketch`) where the q-quantile will be accurate up to
+the specified relative error for q that is not too small (or large). Concretely,
+the q-quantile will be accurate up to the specified relative error as long as it
+belongs to one of the `m` bins kept by the sketch. If the data is time in
+seconds, the default of `m = 2048` covers 80 microseconds to 1 year.
+
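+As a quick illustration (an editor's sketch, not part of the upstream README),
+a collapsing variant takes the relative accuracy and the bin limit directly:
+```
+from ddsketch import LogCollapsingLowestDenseDDSketch
+
+# At most 2048 bins; the lowest-index bins are collapsed first.
+sketch = LogCollapsingLowestDenseDDSketch(relative_accuracy=0.01, bin_limit=2048)
+```
+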
+## Installation
+
+To install this package, run `pip install ddsketch`, or clone the repo and run
+`python setup.py install`. This package depends on `numpy` and `protobuf`. (The
+protobuf dependency can be removed if it's not applicable.)
+
+## Usage
+```
+from ddsketch import DDSketch
+
+sketch = DDSketch()
+```
+Add values to the sketch
+```
+import numpy as np
+
+values = np.random.normal(size=500)
+for v in values:
+ sketch.add(v)
+```
+Find the quantiles of `values` to within the relative error.
+```
+quantiles = [sketch.get_quantile_value(q) for q in [0.5, 0.75, 0.9, 1]]
+```
+Merge another `DDSketch` into `sketch`.
+```
+another_sketch = DDSketch()
+other_values = np.random.normal(size=500)
+for v in other_values:
+ another_sketch.add(v)
+sketch.merge(another_sketch)
+```
+The quantiles of `values` concatenated with `other_values` are still accurate to within the relative error.
+
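+As a quick check (an editor's illustration), querying the merged sketch is the
+same as querying a single one:
+```
+combined_median = sketch.get_quantile_value(0.5)  # reflects all 1,000 values
+```
+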
+## Development
+
+To work on ddsketch, a Python interpreter must be installed. It is recommended to use the provided development
+container (requires [docker](https://www.docker.com/)) which includes all the required Python interpreters.
+
+ docker-compose run dev
+
+Or, if developing outside of docker then it is recommended to use a virtual environment:
+
+ pip install virtualenv
+ virtualenv --python=3 .venv
+ source .venv/bin/activate
+
+
+### Testing
+
+To run the tests, install `riot`:
+
+ pip install riot
+
+Replace the Python version with the interpreter(s) available.
+
+ # Run tests with Python 3.9
+ riot run -p3.9 test
+
+### Release notes
+
+New features, bug fixes, deprecations and other breaking changes must have
+release notes included.
+
+To generate a release note for the change:
+
+ riot run reno new
+
+Edit the generated file to include notes on the changes made in the commit/PR
+and commit it.
+
+
+### Formatting
+
+Format code with
+
+ riot run fmt
+
+
+### Type-checking
+
+Type checking is done with [mypy](http://mypy-lang.org/):
+
+ riot run mypy
+
+
+### Linting
+
+Lint the code with [flake8](https://flake8.pycqa.org/en/latest/):
+
+ riot run flake8
+
+
+### Protobuf
+
+The protobuf is stored in the Go repository: https://github.com/DataDog/sketches-go/blob/master/ddsketch/pb/ddsketch.proto
+
+Install the minimum required protoc and generate the Python code:
+
+```sh
+docker run -v $PWD:/code -it ubuntu:18.04 /bin/bash
+apt update && apt install protobuf-compiler # default is 3.0.0
+protoc --proto_path=ddsketch/pb/ --python_out=ddsketch/pb/ ddsketch/pb/ddsketch.proto
+```
+
+
+### Releasing
+
+1. Generate the release notes and use [`pandoc`](https://pandoc.org/) to format
+them for GitHub:
+```bash
+ git checkout master && git pull
+ riot run -s reno report --no-show-source | pandoc -f rst -t gfm --wrap=none
+```
+ Copy the output into a new release: https://github.com/DataDog/sketches-py/releases/new.
+
+2. Enter a tag for the release (following [`semver`](https://semver.org)) (eg. `v1.1.3`, `v1.0.3`, `v1.2.0`).
+3. Use the tag without the `v` as the title.
+4. Save the release as a draft and pass the link to someone else to give a quick review.
+5. If all looks good, hit publish.
+
+
+## References
+[1] Charles Masson and Jee E Rim and Homin K. Lee. DDSketch: A fast and fully-mergeable quantile sketch with relative-error guarantees. PVLDB, 12(12): 2195-2205, 2019. (The code referenced in the paper, including our implementation of the Greenwald-Khanna (GK) algorithm, can be found at: https://github.com/DataDog/sketches-py/releases/tag/v0.1 )
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/NOTICE b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/NOTICE
new file mode 100644
index 0000000..035c9ad
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/NOTICE
@@ -0,0 +1,4 @@
+Datadog sketches-py
+Copyright 2020 Datadog, Inc.
+
+This product includes software developed at Datadog (https://www.datadoghq.com/).
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/RECORD
new file mode 100644
index 0000000..d951ffa
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/RECORD
@@ -0,0 +1,30 @@
+ddsketch-2.0.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+ddsketch-2.0.4.dist-info/LICENSE,sha256=T0-WFEYXjD5IYjlmlH0JAbfqTHa_YvAl875ydhaqdKA,554
+ddsketch-2.0.4.dist-info/LICENSE-3rdparty.csv,sha256=z16O1RgqAgDTZqLSzos-HZ1vqH1cqaR6Vm9qDz5Fhuc,201
+ddsketch-2.0.4.dist-info/METADATA,sha256=cMuoMMwqZ0i1m3iMwqDBdgsWokDVcY4BzUxZj0cZe68,5456
+ddsketch-2.0.4.dist-info/NOTICE,sha256=rVyH-sbkieAzCC_Ni4rDz0feqaG-7tWHwlj1aIfH33Q,132
+ddsketch-2.0.4.dist-info/RECORD,,
+ddsketch-2.0.4.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddsketch-2.0.4.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
+ddsketch-2.0.4.dist-info/top_level.txt,sha256=pPjwA4dRqDmExS6WSOHicanUpoHB-b9852iZwDUkNcI,9
+ddsketch/__init__.py,sha256=IYdg8QtcIE_l66Ze1TiC18XaVZwAR9k8YOSY_2rkMYs,717
+ddsketch/__pycache__/__init__.cpython-311.pyc,,
+ddsketch/__pycache__/__version.cpython-311.pyc,,
+ddsketch/__pycache__/_version.cpython-311.pyc,,
+ddsketch/__pycache__/ddsketch.cpython-311.pyc,,
+ddsketch/__pycache__/mapping.cpython-311.pyc,,
+ddsketch/__pycache__/store.cpython-311.pyc,,
+ddsketch/__version.py,sha256=O-_Pobu2r8B57xjWutMfz4LJDMMon23xJAyWJe3yPAQ,176
+ddsketch/_version.py,sha256=HUdEupMtRv7sb1QCczoihjq-kz5jF4rDbew15qDFB-g,504
+ddsketch/ddsketch.py,sha256=ha-eunudNhueaVJGDB9VF13QGhi1W1ncqAto5XDGtnI,11444
+ddsketch/mapping.py,sha256=FBs2PdhLAQB3F28GpWfpAGRpDcnt-FM-n8fIdtC0JYM,7759
+ddsketch/pb/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddsketch/pb/__pycache__/__init__.cpython-311.pyc,,
+ddsketch/pb/__pycache__/ddsketch_pb2.cpython-311.pyc,,
+ddsketch/pb/__pycache__/ddsketch_pre319_pb2.cpython-311.pyc,,
+ddsketch/pb/__pycache__/proto.cpython-311.pyc,,
+ddsketch/pb/ddsketch_pb2.py,sha256=y_5bB9hMZyDZQtNvSzRRxMua-BYBtarjSA6qLkuyA54,3580
+ddsketch/pb/ddsketch_pre319_pb2.py,sha256=lqYZ8DaWHl6Sgb5MW2rb4ztJQbE7Su-g5yqkClTmICA,10087
+ddsketch/pb/proto.py,sha256=S9PYWyGTpmB2XI_RJIdxLw9VA-b_OB1YQ-P7VXHcvNk,3315
+ddsketch/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddsketch/store.py,sha256=2yZvF78s65blAon-TuS_vRJiea4nD71P9Bo01rUuP7w,17515
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/WHEEL
new file mode 100644
index 0000000..becc9a6
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/WHEEL
@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.37.1)
+Root-Is-Purelib: true
+Tag: py3-none-any
+
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/top_level.txt b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/top_level.txt
new file mode 100644
index 0000000..292b4e0
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch-2.0.4.dist-info/top_level.txt
@@ -0,0 +1 @@
+ddsketch
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/.DS_Store b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/.DS_Store
new file mode 100644
index 0000000..c5e11fb
Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/.DS_Store differ
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/__init__.py
new file mode 100644
index 0000000..fcf1e10
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/__init__.py
@@ -0,0 +1,24 @@
+from ._version import get_version
+from .ddsketch import DDSketch
+from .ddsketch import LogCollapsingHighestDenseDDSketch
+from .ddsketch import LogCollapsingLowestDenseDDSketch
+from .mapping import CubicallyInterpolatedMapping
+from .mapping import LinearlyInterpolatedMapping
+from .mapping import LogarithmicMapping
+from .store import CollapsingHighestDenseStore
+from .store import CollapsingLowestDenseStore
+
+
+__version__ = get_version()
+
+
+__all__ = [
+ "DDSketch",
+ "LogCollapsingLowestDenseDDSketch",
+ "LogCollapsingHighestDenseDDSketch",
+ "CubicallyInterpolatedMapping",
+ "LinearlyInterpolatedMapping",
+ "LogarithmicMapping",
+ "CollapsingHighestDenseStore",
+ "CollapsingLowestDenseStore",
+]
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/__version.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/__version.py
new file mode 100644
index 0000000..51765be
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/__version.py
@@ -0,0 +1,5 @@
+# coding: utf-8
+# file generated by setuptools_scm
+# don't change, don't track in version control
+__version__ = version = '2.0.4'
+__version_tuple__ = version_tuple = (2, 0, 4)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/_version.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/_version.py
new file mode 100644
index 0000000..1c65a0d
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/_version.py
@@ -0,0 +1,17 @@
+def get_version():
+ # type: () -> str
+ """Return the package version.
+
+ The write_to functionality of setuptools_scm is used (see setup.py)
+ to output the version to ddsketch/__version.py which we attempt to import.
+
+ This is done to avoid the expensive overhead of importing pkg_resources.
+ """
+ try:
+ from .__version import version
+
+ return version
+ except ImportError:
+ import pkg_resources
+
+ return pkg_resources.get_distribution(__name__).version
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/ddsketch.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/ddsketch.py
new file mode 100644
index 0000000..ba72562
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/ddsketch.py
@@ -0,0 +1,316 @@
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2020 Datadog, Inc.
+
+"""A quantile sketch with relative-error guarantees. This sketch computes
+quantile values with an approximation error that is relative to the actual
+quantile value. It works on both negative and non-negative input values.
+
+For instance, using DDSketch with a relative accuracy guarantee set to 1%, if
+the expected quantile value is 100, the computed quantile value is guaranteed to
+be between 99 and 101. If the expected quantile value is 1000, the computed
+quantile value is guaranteed to be between 990 and 1010.
+
+DDSketch works by mapping floating-point input values to bins and counting the
+number of values for each bin. The underlying structure that keeps track of bin
+counts is the store.
+
+The memory size of the sketch depends on the range that is covered by the input
+values: the larger that range, the more bins are needed to keep track of the
+input values. As a rough estimate, if working on durations with a relative
+accuracy of 2%, about 2kB (275 bins) are needed to cover values between 1
+millisecond and 1 minute, and about 6kB (802 bins) to cover values between 1
+nanosecond and 1 day.
+
+The size of the sketch can be given a fail-safe upper bound by using collapsing
+stores. As shown in the DDSketch paper, the likelihood of a store collapsing
+when using the default bound is vanishingly small for most data.
+
+DDSketch implementations are also available in Go, Python, and JavaScript.
+"""
+import typing
+
+from .mapping import LogarithmicMapping
+from .store import CollapsingHighestDenseStore
+from .store import CollapsingLowestDenseStore
+from .store import DenseStore
+
+
+if typing.TYPE_CHECKING:
+ from typing import Optional
+
+ from .mapping import KeyMapping
+ from .store import Store
+
+
+DEFAULT_REL_ACC = 0.01 # "alpha" in the paper
+DEFAULT_BIN_LIMIT = 2048
+
+
+class BaseDDSketch(object):
+ """The base implementation of DDSketch with neither mapping nor storage specified.
+
+ Args:
+ mapping (mapping.KeyMapping): map btw values and store bins
+ store (store.Store): storage for positive values
+ negative_store (store.Store): storage for negative values
+ zero_count (float): The count of zero values
+
+ Attributes:
+ relative_accuracy (float): the accuracy guarantee; referred to as alpha
+ in the paper. (0. < alpha < 1.)
+
+ count: the number of values seen by the sketch
+ min: the minimum value seen by the sketch
+ max: the maximum value seen by the sketch
+ sum: the sum of the values seen by the sketch
+ """
+
+ def __init__(
+ self,
+ mapping,
+ store,
+ negative_store,
+ zero_count,
+ ):
+ # type: (KeyMapping, Store, Store, float) -> None
+ self._mapping = mapping
+ self._store = store
+ self._negative_store = negative_store
+ self._zero_count = zero_count
+
+ self._relative_accuracy = mapping.relative_accuracy
+ self._count = self._negative_store.count + self._zero_count + self._store.count
+ self._min = float("+inf")
+ self._max = float("-inf")
+ self._sum = 0.0
+
+ def __repr__(self):
+ # type: () -> str
+ return (
+ "store: {}, negative_store: {}, "
+ "zero_count: {}, count: {}, "
+ "sum: {}, min: {}, max: {}"
+ ).format(
+ self._store,
+ self._negative_store,
+ self._zero_count,
+ self._count,
+ self._sum,
+ self._min,
+ self._max,
+ )
+
+ @property
+ def count(self):
+ return self._count
+
+ @property
+ def name(self):
+ # type: () -> str
+ """str: name of the sketch"""
+ return "DDSketch"
+
+ @property
+ def num_values(self):
+ # type: () -> float
+ """Return the number of values in the sketch."""
+ return self._count
+
+ @property
+ def avg(self):
+ # type: () -> float
+ """Return the exact average of the values added to the sketch."""
+ return self._sum / self._count
+
+ @property
+ def sum(self): # noqa: A003
+ # type: () -> float
+ """Return the exact sum of the values added to the sketch."""
+ return self._sum
+
+ def add(self, val, weight=1.0):
+ # type: (float, float) -> None
+ """Add a value to the sketch."""
+ if weight <= 0.0:
+ raise ValueError("weight must be a positive float, got %r" % weight)
+
+ if val > self._mapping.min_possible:
+ self._store.add(self._mapping.key(val), weight)
+ elif val < -self._mapping.min_possible:
+ self._negative_store.add(self._mapping.key(-val), weight)
+ else:
+ self._zero_count += weight
+
+ # Keep track of summary stats
+ self._count += weight
+ self._sum += val * weight
+ if val < self._min:
+ self._min = val
+ if val > self._max:
+ self._max = val
+
+ def get_quantile_value(self, quantile):
+ # type: (float) -> Optional[float]
+ """Return the approximate value at the specified quantile.
+
+ Args:
+ quantile (float): 0 <= q <=1
+
+ Returns:
+ the value at the specified quantile or None if the sketch is empty
+ """
+ if quantile < 0 or quantile > 1 or self._count == 0:
+ return None
+
+ rank = quantile * (self._count - 1)
+ if rank < self._negative_store.count:
+ reversed_rank = self._negative_store.count - rank - 1
+ key = self._negative_store.key_at_rank(reversed_rank, lower=False)
+ quantile_value = -self._mapping.value(key)
+ elif rank < self._zero_count + self._negative_store.count:
+ return 0
+ else:
+ key = self._store.key_at_rank(
+ rank - self._zero_count - self._negative_store.count
+ )
+ quantile_value = self._mapping.value(key)
+ return quantile_value
+
+ def merge(self, sketch):
+ # type: (BaseDDSketch) -> None
+ """Merge the given sketch into this one. After this operation, this sketch
+ encodes the values that were added to both this and the input sketch.
+ """
+ if not self._mergeable(sketch):
+ raise ValueError(
+ "Cannot merge two DDSketches with different parameters, got %r and %r"
+ % (self._mapping.gamma, sketch._mapping.gamma)
+ )
+
+ if sketch.count == 0:
+ return
+
+ if self._count == 0:
+ self._copy(sketch)
+ return
+
+ # Merge the stores
+ self._store.merge(sketch._store)
+ self._negative_store.merge(sketch._negative_store)
+ self._zero_count += sketch._zero_count
+
+ # Merge summary stats
+ self._count += sketch._count
+ self._sum += sketch._sum
+ if sketch._min < self._min:
+ self._min = sketch._min
+ if sketch._max > self._max:
+ self._max = sketch._max
+
+ def _mergeable(self, other):
+ # type: (BaseDDSketch) -> bool
+ """Two sketches can be merged only if their gammas are equal."""
+ return self._mapping.gamma == other._mapping.gamma
+
+ def _copy(self, sketch):
+ # type: (BaseDDSketch) -> None
+ """Copy the input sketch into this one"""
+ self._store.copy(sketch._store)
+ self._negative_store.copy(sketch._negative_store)
+ self._zero_count = sketch._zero_count
+ self._min = sketch._min
+ self._max = sketch._max
+ self._count = sketch._count
+ self._sum = sketch._sum
+
+
+class DDSketch(BaseDDSketch):
+ """The default implementation of BaseDDSketch, with optimized memory usage at
+ the cost of lower ingestion speed, using an unlimited number of bins. The
+ number of bins will not exceed a reasonable number unless the data is
+ distributed with tails heavier than any subexponential.
+ (cf. http://www.vldb.org/pvldb/vol12/p2195-masson.pdf)
+ """
+
+ def __init__(self, relative_accuracy=None):
+ # type: (Optional[float]) -> None
+ # Make sure the parameters are valid
+ if relative_accuracy is None:
+ relative_accuracy = DEFAULT_REL_ACC
+
+ mapping = LogarithmicMapping(relative_accuracy)
+ store = DenseStore()
+ negative_store = DenseStore()
+ super(DDSketch, self).__init__(
+ mapping=mapping,
+ store=store,
+ negative_store=negative_store,
+ zero_count=0.0,
+ )
+
+
+class LogCollapsingLowestDenseDDSketch(BaseDDSketch):
+ """Implementation of BaseDDSketch with optimized memory usage at the cost of
+ lower ingestion speed, using a limited number of bins. When the maximum
+ number of bins is reached, bins with lowest indices are collapsed, which
+ causes the relative accuracy to be lost on the lowest quantiles. For the
+ default bin limit, collapsing is unlikely to occur unless the data is
+ distributed with tails heavier than any subexponential.
+ (cf. http://www.vldb.org/pvldb/vol12/p2195-masson.pdf)
+ """
+
+ def __init__(self, relative_accuracy=None, bin_limit=None):
+ # type: (Optional[float], Optional[int]) -> None
+ # Make sure the parameters are valid
+ if relative_accuracy is None:
+ relative_accuracy = DEFAULT_REL_ACC
+
+ if bin_limit is None or bin_limit < 0:
+ bin_limit = DEFAULT_BIN_LIMIT
+
+ mapping = LogarithmicMapping(relative_accuracy)
+ store = CollapsingLowestDenseStore(bin_limit)
+ negative_store = CollapsingLowestDenseStore(bin_limit)
+ super(LogCollapsingLowestDenseDDSketch, self).__init__(
+ mapping=mapping,
+ store=store,
+ negative_store=negative_store,
+ zero_count=0.0,
+ )
+
+
+class LogCollapsingHighestDenseDDSketch(BaseDDSketch):
+ """Implementation of BaseDDSketch with optimized memory usage at the cost of
+ lower ingestion speed, using a limited number of bins. When the maximum
+ number of bins is reached, bins with highest indices are collapsed, which
+ causes the relative accuracy to be lost on the highest quantiles. For the
+ default bin limit, collapsing is unlikely to occur unless the data is
+ distributed with tails heavier than any subexponential.
+ (cf. http://www.vldb.org/pvldb/vol12/p2195-masson.pdf)
+ """
+
+ def __init__(self, relative_accuracy=None, bin_limit=None):
+ # type: (Optional[float], Optional[int]) -> None
+ # Make sure the parameters are valid
+ if relative_accuracy is None:
+ relative_accuracy = DEFAULT_REL_ACC
+
+ if bin_limit is None or bin_limit < 0:
+ bin_limit = DEFAULT_BIN_LIMIT
+
+ mapping = LogarithmicMapping(relative_accuracy)
+ store = CollapsingHighestDenseStore(bin_limit)
+ negative_store = CollapsingHighestDenseStore(bin_limit)
+ super(LogCollapsingHighestDenseDDSketch, self).__init__(
+ mapping=mapping,
+ store=store,
+ negative_store=negative_store,
+ zero_count=0.0,
+ )
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/mapping.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/mapping.py
new file mode 100644
index 0000000..4599385
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/mapping.py
@@ -0,0 +1,216 @@
+from __future__ import division
+
+
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2020 Datadog, Inc.
+
+"""A mapping between values and integer indices that imposes relative accuracy
+guarantees. Specifically, for any value `minIndexableValue() < value <
+maxIndexableValue()`, implementations of `KeyMapping` must be such that
+`value(key(v))` is close to `v` with a relative error that is less than
+`relative_accuracy`.
+
+In implementations of KeyMapping, there is generally a trade-off between the
+cost of computing the key and the number of keys that are required to cover a
+given range of values (memory optimality). The most memory-optimal mapping is
+the LogarithmicMapping, but it requires the costly evaluation of the logarithm
+when computing the index. Other mappings can approximate the logarithmic
+mapping, while being less computationally costly.
+"""
+from abc import ABCMeta
+from abc import abstractmethod
+import math
+import sys
+
+import six
+
+
+class KeyMapping(six.with_metaclass(ABCMeta)):
+ """
+ Args:
+ relative_accuracy (float): the accuracy guarantee; referred to as alpha
+ in the paper. (0. < alpha < 1.)
+ offset (float): an offset that can be used to shift all bin keys
+ Attributes:
+ gamma (float): the base for the exponential buckets. gamma = (1 + alpha) / (1 - alpha)
+ min_possible: the smallest value the sketch can distinguish from 0
+ max_possible: the largest value the sketch can handle
+ _multiplier (float): used for calculating log_gamma(value) initially, _multiplier = 1 / log(gamma)
+ """
+
+ def __init__(self, relative_accuracy, offset=0.0):
+ # type: (float, float) -> None
+ if relative_accuracy <= 0 or relative_accuracy >= 1:
+ raise ValueError(
+ "Relative accuracy must be between 0 and 1, got %r" % relative_accuracy
+ )
+ self.relative_accuracy = relative_accuracy
+ self._offset = offset
+
+ gamma_mantissa = 2 * relative_accuracy / (1 - relative_accuracy)
+ self.gamma = 1 + gamma_mantissa
+ self._multiplier = 1 / math.log1p(gamma_mantissa)
+ self.min_possible = sys.float_info.min * self.gamma
+ self.max_possible = sys.float_info.max / self.gamma
+
+ @classmethod
+ def from_gamma_offset(cls, gamma, offset):
+ # type: (float, float) -> KeyMapping
+ """Constructor used by pb.proto"""
+ relative_accuracy = (gamma - 1.0) / (gamma + 1.0)
+ return cls(relative_accuracy, offset=offset)
+
+ @abstractmethod
+ def _log_gamma(self, value):
+ # type: (float) -> float
+ """Return (an approximation of) the logarithm of the value base gamma"""
+
+ @abstractmethod
+ def _pow_gamma(self, value):
+ # type: (float) -> float
+ """Return (an approximation of) gamma to the power value"""
+
+ def key(self, value):
+ # type: (float) -> int
+ """
+ Args:
+ value (float)
+ Returns:
+ int: the key specifying the bucket for value
+ """
+ return int(math.ceil(self._log_gamma(value)) + self._offset)
+
+ def value(self, key):
+ # type: (int) -> float
+ """
+ Args:
+ key (int)
+ Returns:
+ float: the value represented by the bucket specified by the key
+ """
+ return self._pow_gamma(key - self._offset) * (2.0 / (1 + self.gamma))
+
+
+class LogarithmicMapping(KeyMapping):
+ """A memory-optimal KeyMapping, i.e., given a targeted relative accuracy, it
+ requires the least number of keys to cover a given range of values. This is
+ done by logarithmically mapping floating-point values to integers.
+ """
+
+ def __init__(self, relative_accuracy, offset=0.0):
+ # type: (float, float) -> None
+ super(LogarithmicMapping, self).__init__(relative_accuracy, offset=offset)
+ self._multiplier *= math.log(2)
+
+ def _log_gamma(self, value):
+ # type: (float) -> float
+ return math.log(value, 2) * self._multiplier
+
+ def _pow_gamma(self, value):
+ # type: (float) -> float
+ return math.pow(2.0, value / self._multiplier)
+
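
The accuracy contract from the module docstring can be checked directly against LogarithmicMapping: `value(key(v))` must round-trip within `relative_accuracy` of `v`. A quick self-check sketch:

import random

mapping = LogarithmicMapping(relative_accuracy=0.01)
for _ in range(1000):
    v = random.uniform(1e-6, 1e6)
    # |value(key(v)) - v| <= relative_accuracy * v, per the KeyMapping contract;
    # the 1.001 factor leaves a little slack for float rounding at the boundary.
    assert abs(mapping.value(mapping.key(v)) - v) <= 0.01 * v * 1.001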
+
+def _cbrt(x):
+ # type: (float) -> float
+ y = float(abs(x) ** (1.0 / 3.0))
+ if x < 0:
+ return -y
+ return y
+
+
+class LinearlyInterpolatedMapping(KeyMapping):
+ """A fast KeyMapping that approximates the memory-optimal
+ LogarithmicMapping by extracting the floor value of the logarithm to the
+ base 2 from the binary representations of floating-point values and
+ linearly interpolating the logarithm in-between.
+ """
+
+ def _log2_approx(self, value):
+ # type: (float) -> float
+ """Approximates log2 by s + f
+ where v = (s+1) * 2 ** f for s in [0, 1)
+
+ frexp(v) returns m and e s.t.
+ v = m * 2 ** e ; (m in [0.5, 1) or 0.0)
+ so we adjust m and e accordingly
+ """
+ mantissa, exponent = math.frexp(value)
+ significand = 2 * mantissa - 1
+ return significand + (exponent - 1)
+
+ def _exp2_approx(self, value):
+ # type: (float) -> float
+ """Inverse of _log2_approx"""
+ exponent = int(math.floor(value) + 1)
+ mantissa = (value - exponent + 2) / 2.0
+ return math.ldexp(mantissa, exponent)
+
+ def _log_gamma(self, value):
+ # type: (float) -> float
+ return self._log2_approx(value) * self._multiplier
+
+ def _pow_gamma(self, value):
+ # type: (float) -> float
+ return self._exp2_approx(value / self._multiplier)
+
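
To make the frexp bookkeeping concrete, a worked example with v = 6.0:

import math

mantissa, exponent = math.frexp(6.0)   # (0.75, 3): 6.0 == 0.75 * 2**3
significand = 2 * mantissa - 1         # 0.5, i.e. 6.0 == (1 + 0.5) * 2**2
approx = significand + (exponent - 1)  # 2.5
exact = math.log(6.0, 2)               # ~2.585
# The interpolation is exact at powers of two and off by at most ~0.086 in
# between; the extra keys this costs are the memory/CPU trade-off described
# in the module docstring.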
+
+class CubicallyInterpolatedMapping(KeyMapping):
+ """A fast KeyMapping that approximates the memory-optimal LogarithmicMapping by
+ extracting the floor value of the logarithm to the base 2 from the binary
+ representations of floating-point values and cubically interpolating the
+ logarithm in-between.
+
+    More detailed documentation of this method can be found in the
+    sketches-java repository:
+    https://github.com/DataDog/sketches-java
+
+ A = 6.0 / 35.0
+ B = -3.0 / 5.0
+ C = 10.0 / 7.0
+
+ def __init__(self, relative_accuracy, offset=0.0):
+ # type: (float, float) -> None
+ super(CubicallyInterpolatedMapping, self).__init__(
+ relative_accuracy, offset=offset
+ )
+ self._multiplier /= self.C
+
+ def _cubic_log2_approx(self, value):
+ # type: (float) -> float
+ """Approximates log2 using a cubic polynomial"""
+ mantissa, exponent = math.frexp(value)
+ significand = 2 * mantissa - 1
+ return (
+ (self.A * significand + self.B) * significand + self.C
+ ) * significand + (exponent - 1)
+
+ def _cubic_exp2_approx(self, value):
+ # type: (float) -> float
+ # Derived from Cardano's formula
+ exponent = int(math.floor(value))
+ delta_0 = self.B * self.B - 3 * self.A * self.C
+ delta_1 = (
+ 2.0 * self.B * self.B * self.B
+ - 9.0 * self.A * self.B * self.C
+ - 27.0 * self.A * self.A * (value - exponent)
+ )
+ cardano = _cbrt(
+ (delta_1 - ((delta_1 * delta_1 - 4 * delta_0 * delta_0 * delta_0) ** 0.5))
+ / 2.0
+ )
+ significand_plus_one = (
+ -(self.B + cardano + delta_0 / cardano) / (3.0 * self.A) + 1.0
+ )
+ mantissa = significand_plus_one / 2
+ return math.ldexp(mantissa, exponent + 1)
+
+ def _log_gamma(self, value):
+ # type: (float) -> float
+ return self._cubic_log2_approx(value) * self._multiplier
+
+ def _pow_gamma(self, value):
+ # type: (float) -> float
+ return self._cubic_exp2_approx(value / self._multiplier)
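
Since the cubic inverse is derived via Cardano's formula rather than tabulated, a round-trip self-check is a useful sanity sketch (constants A, B, C as defined above):

import math

m = CubicallyInterpolatedMapping(relative_accuracy=0.01)
for x in (0.001, 1.0, 3.14159, 12345.678):
    # The Cardano-based inverse should undo the cubic approximation up to
    # floating-point noise; the tolerance here is deliberately loose.
    assert math.isclose(m._cubic_exp2_approx(m._cubic_log2_approx(x)), x, rel_tol=1e-6)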
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/ddsketch_pb2.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/ddsketch_pb2.py
new file mode 100644
index 0000000..81525b2
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/ddsketch_pb2.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: ddsketch.proto
+"""Generated protocol buffer code."""
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0e\x64\x64sketch.proto\"}\n\x08\x44\x44Sketch\x12\x1e\n\x07mapping\x18\x01 \x01(\x0b\x32\r.IndexMapping\x12\x1e\n\x0epositiveValues\x18\x02 \x01(\x0b\x32\x06.Store\x12\x1e\n\x0enegativeValues\x18\x03 \x01(\x0b\x32\x06.Store\x12\x11\n\tzeroCount\x18\x04 \x01(\x01\"\xa7\x01\n\x0cIndexMapping\x12\r\n\x05gamma\x18\x01 \x01(\x01\x12\x13\n\x0bindexOffset\x18\x02 \x01(\x01\x12\x32\n\rinterpolation\x18\x03 \x01(\x0e\x32\x1b.IndexMapping.Interpolation\"?\n\rInterpolation\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06LINEAR\x10\x01\x12\r\n\tQUADRATIC\x10\x02\x12\t\n\x05\x43UBIC\x10\x03\"\xa6\x01\n\x05Store\x12(\n\tbinCounts\x18\x01 \x03(\x0b\x32\x15.Store.BinCountsEntry\x12\x1f\n\x13\x63ontiguousBinCounts\x18\x02 \x03(\x01\x42\x02\x10\x01\x12 \n\x18\x63ontiguousBinIndexOffset\x18\x03 \x01(\x11\x1a\x30\n\x0e\x42inCountsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x11\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x62\x06proto3')
+
+
+
+_DDSKETCH = DESCRIPTOR.message_types_by_name['DDSketch']
+_INDEXMAPPING = DESCRIPTOR.message_types_by_name['IndexMapping']
+_STORE = DESCRIPTOR.message_types_by_name['Store']
+_STORE_BINCOUNTSENTRY = _STORE.nested_types_by_name['BinCountsEntry']
+_INDEXMAPPING_INTERPOLATION = _INDEXMAPPING.enum_types_by_name['Interpolation']
+DDSketch = _reflection.GeneratedProtocolMessageType('DDSketch', (_message.Message,), {
+ 'DESCRIPTOR' : _DDSKETCH,
+ '__module__' : 'ddsketch_pb2'
+ # @@protoc_insertion_point(class_scope:DDSketch)
+ })
+_sym_db.RegisterMessage(DDSketch)
+
+IndexMapping = _reflection.GeneratedProtocolMessageType('IndexMapping', (_message.Message,), {
+ 'DESCRIPTOR' : _INDEXMAPPING,
+ '__module__' : 'ddsketch_pb2'
+ # @@protoc_insertion_point(class_scope:IndexMapping)
+ })
+_sym_db.RegisterMessage(IndexMapping)
+
+Store = _reflection.GeneratedProtocolMessageType('Store', (_message.Message,), {
+
+ 'BinCountsEntry' : _reflection.GeneratedProtocolMessageType('BinCountsEntry', (_message.Message,), {
+ 'DESCRIPTOR' : _STORE_BINCOUNTSENTRY,
+ '__module__' : 'ddsketch_pb2'
+ # @@protoc_insertion_point(class_scope:Store.BinCountsEntry)
+ })
+ ,
+ 'DESCRIPTOR' : _STORE,
+ '__module__' : 'ddsketch_pb2'
+ # @@protoc_insertion_point(class_scope:Store)
+ })
+_sym_db.RegisterMessage(Store)
+_sym_db.RegisterMessage(Store.BinCountsEntry)
+
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+ DESCRIPTOR._options = None
+ _STORE_BINCOUNTSENTRY._options = None
+ _STORE_BINCOUNTSENTRY._serialized_options = b'8\001'
+ _STORE.fields_by_name['contiguousBinCounts']._options = None
+ _STORE.fields_by_name['contiguousBinCounts']._serialized_options = b'\020\001'
+ _DDSKETCH._serialized_start=18
+ _DDSKETCH._serialized_end=143
+ _INDEXMAPPING._serialized_start=146
+ _INDEXMAPPING._serialized_end=313
+ _INDEXMAPPING_INTERPOLATION._serialized_start=250
+ _INDEXMAPPING_INTERPOLATION._serialized_end=313
+ _STORE._serialized_start=316
+ _STORE._serialized_end=482
+ _STORE_BINCOUNTSENTRY._serialized_start=434
+ _STORE_BINCOUNTSENTRY._serialized_end=482
+# @@protoc_insertion_point(module_scope)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/ddsketch_pre319_pb2.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/ddsketch_pre319_pb2.py
new file mode 100644
index 0000000..4a6d3ef
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/ddsketch_pre319_pb2.py
@@ -0,0 +1,283 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: ddsketch.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='ddsketch.proto',
+ package='',
+ syntax='proto3',
+ serialized_pb=_b('\n\x0e\x64\x64sketch.proto\"}\n\x08\x44\x44Sketch\x12\x1e\n\x07mapping\x18\x01 \x01(\x0b\x32\r.IndexMapping\x12\x1e\n\x0epositiveValues\x18\x02 \x01(\x0b\x32\x06.Store\x12\x1e\n\x0enegativeValues\x18\x03 \x01(\x0b\x32\x06.Store\x12\x11\n\tzeroCount\x18\x04 \x01(\x01\"\xa7\x01\n\x0cIndexMapping\x12\r\n\x05gamma\x18\x01 \x01(\x01\x12\x13\n\x0bindexOffset\x18\x02 \x01(\x01\x12\x32\n\rinterpolation\x18\x03 \x01(\x0e\x32\x1b.IndexMapping.Interpolation\"?\n\rInterpolation\x12\x08\n\x04NONE\x10\x00\x12\n\n\x06LINEAR\x10\x01\x12\r\n\tQUADRATIC\x10\x02\x12\t\n\x05\x43UBIC\x10\x03\"\xa6\x01\n\x05Store\x12(\n\tbinCounts\x18\x01 \x03(\x0b\x32\x15.Store.BinCountsEntry\x12\x1f\n\x13\x63ontiguousBinCounts\x18\x02 \x03(\x01\x42\x02\x10\x01\x12 \n\x18\x63ontiguousBinIndexOffset\x18\x03 \x01(\x11\x1a\x30\n\x0e\x42inCountsEntry\x12\x0b\n\x03key\x18\x01 \x01(\x11\x12\r\n\x05value\x18\x02 \x01(\x01:\x02\x38\x01\x62\x06proto3')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+_INDEXMAPPING_INTERPOLATION = _descriptor.EnumDescriptor(
+ name='Interpolation',
+ full_name='IndexMapping.Interpolation',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='NONE', index=0, number=0,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='LINEAR', index=1, number=1,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='QUADRATIC', index=2, number=2,
+ options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='CUBIC', index=3, number=3,
+ options=None,
+ type=None),
+ ],
+ containing_type=None,
+ options=None,
+ serialized_start=250,
+ serialized_end=313,
+)
+_sym_db.RegisterEnumDescriptor(_INDEXMAPPING_INTERPOLATION)
+
+
+_DDSKETCH = _descriptor.Descriptor(
+ name='DDSketch',
+ full_name='DDSketch',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='mapping', full_name='DDSketch.mapping', index=0,
+ number=1, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='positiveValues', full_name='DDSketch.positiveValues', index=1,
+ number=2, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='negativeValues', full_name='DDSketch.negativeValues', index=2,
+ number=3, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='zeroCount', full_name='DDSketch.zeroCount', index=3,
+ number=4, type=1, cpp_type=5, label=1,
+ has_default_value=False, default_value=float(0),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=18,
+ serialized_end=143,
+)
+
+
+_INDEXMAPPING = _descriptor.Descriptor(
+ name='IndexMapping',
+ full_name='IndexMapping',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='gamma', full_name='IndexMapping.gamma', index=0,
+ number=1, type=1, cpp_type=5, label=1,
+ has_default_value=False, default_value=float(0),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='indexOffset', full_name='IndexMapping.indexOffset', index=1,
+ number=2, type=1, cpp_type=5, label=1,
+ has_default_value=False, default_value=float(0),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='interpolation', full_name='IndexMapping.interpolation', index=2,
+ number=3, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ _INDEXMAPPING_INTERPOLATION,
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=146,
+ serialized_end=313,
+)
+
+
+_STORE_BINCOUNTSENTRY = _descriptor.Descriptor(
+ name='BinCountsEntry',
+ full_name='Store.BinCountsEntry',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='key', full_name='Store.BinCountsEntry.key', index=0,
+ number=1, type=17, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='value', full_name='Store.BinCountsEntry.value', index=1,
+ number=2, type=1, cpp_type=5, label=1,
+ has_default_value=False, default_value=float(0),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=434,
+ serialized_end=482,
+)
+
+_STORE = _descriptor.Descriptor(
+ name='Store',
+ full_name='Store',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='binCounts', full_name='Store.binCounts', index=0,
+ number=1, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='contiguousBinCounts', full_name='Store.contiguousBinCounts', index=1,
+ number=2, type=1, cpp_type=5, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
+ _descriptor.FieldDescriptor(
+ name='contiguousBinIndexOffset', full_name='Store.contiguousBinIndexOffset', index=2,
+ number=3, type=17, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[_STORE_BINCOUNTSENTRY, ],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=316,
+ serialized_end=482,
+)
+
+_DDSKETCH.fields_by_name['mapping'].message_type = _INDEXMAPPING
+_DDSKETCH.fields_by_name['positiveValues'].message_type = _STORE
+_DDSKETCH.fields_by_name['negativeValues'].message_type = _STORE
+_INDEXMAPPING.fields_by_name['interpolation'].enum_type = _INDEXMAPPING_INTERPOLATION
+_INDEXMAPPING_INTERPOLATION.containing_type = _INDEXMAPPING
+_STORE_BINCOUNTSENTRY.containing_type = _STORE
+_STORE.fields_by_name['binCounts'].message_type = _STORE_BINCOUNTSENTRY
+DESCRIPTOR.message_types_by_name['DDSketch'] = _DDSKETCH
+DESCRIPTOR.message_types_by_name['IndexMapping'] = _INDEXMAPPING
+DESCRIPTOR.message_types_by_name['Store'] = _STORE
+
+DDSketch = _reflection.GeneratedProtocolMessageType('DDSketch', (_message.Message,), dict(
+ DESCRIPTOR = _DDSKETCH,
+ __module__ = 'ddsketch_pb2'
+ # @@protoc_insertion_point(class_scope:DDSketch)
+ ))
+_sym_db.RegisterMessage(DDSketch)
+
+IndexMapping = _reflection.GeneratedProtocolMessageType('IndexMapping', (_message.Message,), dict(
+ DESCRIPTOR = _INDEXMAPPING,
+ __module__ = 'ddsketch_pb2'
+ # @@protoc_insertion_point(class_scope:IndexMapping)
+ ))
+_sym_db.RegisterMessage(IndexMapping)
+
+Store = _reflection.GeneratedProtocolMessageType('Store', (_message.Message,), dict(
+
+ BinCountsEntry = _reflection.GeneratedProtocolMessageType('BinCountsEntry', (_message.Message,), dict(
+ DESCRIPTOR = _STORE_BINCOUNTSENTRY,
+ __module__ = 'ddsketch_pb2'
+ # @@protoc_insertion_point(class_scope:Store.BinCountsEntry)
+ ))
+ ,
+ DESCRIPTOR = _STORE,
+ __module__ = 'ddsketch_pb2'
+ # @@protoc_insertion_point(class_scope:Store)
+ ))
+_sym_db.RegisterMessage(Store)
+_sym_db.RegisterMessage(Store.BinCountsEntry)
+
+
+_STORE_BINCOUNTSENTRY.has_options = True
+_STORE_BINCOUNTSENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
+_STORE.fields_by_name['contiguousBinCounts'].has_options = True
+_STORE.fields_by_name['contiguousBinCounts']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
+# @@protoc_insertion_point(module_scope)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/proto.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/proto.py
new file mode 100644
index 0000000..ebccfd6
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/pb/proto.py
@@ -0,0 +1,104 @@
+from ddsketch.ddsketch import BaseDDSketch
+from ..mapping import (
+ CubicallyInterpolatedMapping,
+ LinearlyInterpolatedMapping,
+ LogarithmicMapping,
+)
+from ..store import DenseStore
+
+import google.protobuf
+
+
+pb_version = tuple(map(int, google.protobuf.__version__.split(".")[0:2]))
+
+# pb_version is a (major, minor) 2-tuple, so it must be compared to a 2-tuple;
+# comparing against (3, 19, 0) would misclassify protobuf 3.19 itself.
+if pb_version >= (3, 19):
+ import ddsketch.pb.ddsketch_pb2 as pb
+else:
+ import ddsketch.pb.ddsketch_pre319_pb2 as pb
+
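
Why the guard compares 2-tuples against 2-tuples: Python orders tuples lexicographically and ranks a shorter tuple below any longer tuple it is a prefix of.

# Tuple-ordering pitfall the version guard avoids:
assert not ((3, 19) >= (3, 19, 0))  # a prefix sorts strictly lower
assert (3, 19) >= (3, 19)           # like-shaped tuples compare as expected
assert (4, 21) >= (3, 19)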
+
+class KeyMappingProto:
+ @classmethod
+ def _proto_interpolation(cls, mapping):
+ if type(mapping) is LogarithmicMapping:
+ return pb.IndexMapping.NONE
+ if type(mapping) is LinearlyInterpolatedMapping:
+ return pb.IndexMapping.LINEAR
+ if type(mapping) is CubicallyInterpolatedMapping:
+ return pb.IndexMapping.CUBIC
+
+ @classmethod
+ def to_proto(cls, mapping):
+ """serialize to protobuf"""
+ return pb.IndexMapping(
+ gamma=mapping.gamma,
+ indexOffset=mapping._offset,
+ interpolation=cls._proto_interpolation(mapping),
+ )
+
+ @classmethod
+ def from_proto(cls, proto):
+ """deserialize from protobuf"""
+ if proto.interpolation == pb.IndexMapping.NONE:
+ return LogarithmicMapping.from_gamma_offset(proto.gamma, proto.indexOffset)
+ elif proto.interpolation == pb.IndexMapping.LINEAR:
+ return LinearlyInterpolatedMapping.from_gamma_offset(
+ proto.gamma, proto.indexOffset
+ )
+ elif proto.interpolation == pb.IndexMapping.CUBIC:
+ return CubicallyInterpolatedMapping.from_gamma_offset(
+ proto.gamma, proto.indexOffset
+ )
+ else:
+ raise ValueError("Unrecognized interpolation %r" % proto.interpolation)
+
+
+class StoreProto:
+ """Currently only supports DenseStore"""
+
+ @classmethod
+ def to_proto(cls, store):
+ """serialize to protobuf"""
+ return pb.Store(
+ contiguousBinCounts=store.bins, contiguousBinIndexOffset=store.offset
+ )
+
+ @classmethod
+ def from_proto(cls, proto):
+ """deserialize from protobuf"""
+ store = DenseStore()
+ index = proto.contiguousBinIndexOffset
+ store.offset = index
+ for count in proto.contiguousBinCounts:
+ store.add(index, count)
+ index += 1
+ return store
+
+
+class DDSketchProto:
+ @classmethod
+    def to_proto(cls, ddsketch):
+ """serialize to protobuf"""
+ return pb.DDSketch(
+ mapping=KeyMappingProto.to_proto(ddsketch._mapping),
+ positiveValues=StoreProto.to_proto(ddsketch._store),
+ negativeValues=StoreProto.to_proto(ddsketch._negative_store),
+ zeroCount=ddsketch._zero_count,
+ )
+
+ @classmethod
+ def from_proto(cls, proto):
+ """deserialize from protobuf
+
+ N.B., The current protobuf loses any min/max/sum/avg information.
+ """
+ mapping = KeyMappingProto.from_proto(proto.mapping)
+ negative_store = StoreProto.from_proto(proto.negativeValues)
+ store = StoreProto.from_proto(proto.positiveValues)
+ zero_count = proto.zeroCount
+ return BaseDDSketch(
+ mapping=mapping,
+ store=store,
+ negative_store=negative_store,
+ zero_count=zero_count,
+ )
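
Putting the three helpers together gives a bytes-level round trip. A hedged usage sketch: it assumes the vendored package's top-level DDSketch class (backed by a plain DenseStore) and the standard protobuf SerializeToString/FromString message methods.

from ddsketch import DDSketch  # assumed top-level export of the vendored package

s = DDSketch(relative_accuracy=0.01)
for v in (1.0, 2.0, 3.0, 4.0):
    s.add(v)

blob = DDSketchProto.to_proto(s).SerializeToString()           # wire bytes
restored = DDSketchProto.from_proto(pb.DDSketch.FromString(blob))
# `restored` is a BaseDDSketch; as noted above, min/max/sum/avg do not survive.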
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/py.typed b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddsketch/store.py b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/store.py
new file mode 100644
index 0000000..b9fbb48
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddsketch/store.py
@@ -0,0 +1,504 @@
+from __future__ import division
+
+
+# Unless explicitly stated otherwise all files in this repository are licensed
+# under the Apache License 2.0.
+# This product includes software developed at Datadog (https://www.datadoghq.com/).
+# Copyright 2020 Datadog, Inc.
+
+"""
+Stores map integers to counters. They can be seen as a collection of bins.
+We start with 128 bins and grow the store in chunks of 128 unless specified
+otherwise.
+"""
+
+import abc
+import math
+import typing
+
+
+if typing.TYPE_CHECKING:
+ from typing import List
+ from typing import Optional
+
+import six
+
+
+CHUNK_SIZE = 128
+
+
+class _NegativeIntInfinity(int):
+ def __ge__(self, x):
+ return False
+
+ __gt__ = __ge__
+
+ def __lt__(self, x):
+ return True
+
+ __le__ = __lt__
+
+
+class _PositiveIntInfinity(int):
+ def __ge__(self, x):
+ return True
+
+ __gt__ = __ge__
+
+ def __lt__(self, x):
+ return False
+
+ __le__ = __lt__
+
+
+_neg_infinity = _NegativeIntInfinity()
+_pos_infinity = _PositiveIntInfinity()
+
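
These two int subclasses are ±infinity sentinels: a fresh store starts with min_key greater than every real key and max_key smaller than every real key, so the first add() is guaranteed to trigger _extend_range. Because they are proper subclasses of int that override the comparison dunders, their reflected methods win even when they appear on the right-hand side:

assert 42 < _pos_infinity   # _PositiveIntInfinity.__gt__ is tried first
assert 42 > _neg_infinity   # _NegativeIntInfinity.__lt__ is tried first
assert not (_pos_infinity < 0)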
+
+class Store(six.with_metaclass(abc.ABCMeta)):
+ """The basic specification of a store
+
+ Attributes:
+ count (float): the sum of the counts for the bins
+ min_key (int): the minimum key bin
+ max_key (int): the maximum key bin
+ """
+
+ def __init__(self):
+ # type: () -> None
+ self.count = 0 # type: float
+ self.min_key = _pos_infinity # type: int
+ self.max_key = _neg_infinity # type: int
+
+ @abc.abstractmethod
+ def copy(self, store):
+ """Copies the input store into this one."""
+
+ @abc.abstractmethod
+ def length(self):
+ # type: () -> int
+ """Return the number of bins."""
+
+ @abc.abstractmethod
+ def add(self, key, weight=1.0):
+ # type: (int, float) -> None
+ """Updates the counter at the specified index key, growing the number of bins if
+ necessary.
+ """
+
+ @abc.abstractmethod
+ def key_at_rank(self, rank, lower=True):
+ # type: (float, bool) -> int
+ """Return the key for the value at given rank.
+
+ E.g., if the non-zero bins are [1, 1] for keys a, b with no offset
+
+ if lower = True:
+ key_at_rank(x) = a for x in [0, 1)
+ key_at_rank(x) = b for x in [1, 2)
+
+ if lower = False:
+ key_at_rank(x) = a for x in (-1, 0]
+ key_at_rank(x) = b for x in (0, 1]
+ """
+
+ @abc.abstractmethod
+ def merge(self, store):
+ # type: (Store) -> None
+ """Merge another store into this one. This should be equivalent as running the
+ add operations that have been run on the other store on this one.
+ """
+
+
+class DenseStore(Store):
+ """A dense store that keeps all the bins between the bin for the min_key and the
+ bin for the max_key.
+
+ Args:
+ chunk_size (int, optional): the number of bins to grow by
+
+ Attributes:
+        count (float): the sum of the counts for the bins
+        min_key (int): the minimum key bin
+        max_key (int): the maximum key bin
+        offset (int): the difference between the keys and the indices at which they are stored
+        bins (List[float]): the bins
+ """
+
+ def __init__(self, chunk_size=CHUNK_SIZE):
+ # type: (int) -> None
+ super(DenseStore, self).__init__()
+
+ self.chunk_size = chunk_size # type: int
+ self.offset = 0 # type: int
+ self.bins = [] # type: List[float]
+
+ def __repr__(self):
+ # type: () -> str
+ repr_str = "{"
+ for i, sbin in enumerate(self.bins):
+ repr_str += "%s: %s, " % (i + self.offset, sbin)
+ repr_str += "}}, min_key:%s, max_key:%s, offset:%s" % (
+ self.min_key,
+ self.max_key,
+ self.offset,
+ )
+ return repr_str
+
+ def copy(self, store):
+ # type: (DenseStore) -> None
+ self.bins = store.bins[:]
+ self.count = store.count
+ self.min_key = store.min_key
+ self.max_key = store.max_key
+ self.offset = store.offset
+
+ def length(self):
+ # type: () -> int
+ """Return the number of bins."""
+ return len(self.bins)
+
+ def add(self, key, weight=1.0):
+ # type: (int, float) -> None
+ idx = self._get_index(key)
+ self.bins[idx] += weight
+ self.count += weight
+
+ def _get_index(self, key):
+ # type: (int) -> int
+ """Calculate the bin index for the key, extending the range if necessary."""
+        if key < self.min_key or key > self.max_key:
+            self._extend_range(key)
+
+ return key - self.offset
+
+ def _get_new_length(self, new_min_key, new_max_key):
+ # type: (int, int) -> int
+ desired_length = new_max_key - new_min_key + 1
+ return self.chunk_size * int(math.ceil(desired_length / self.chunk_size))
+
+ def _extend_range(self, key, second_key=None):
+ # type: (int, Optional[int]) -> None
+ """Grow the bins as necessary and call _adjust"""
+ if second_key is None:
+ second_key = key
+ new_min_key = min(key, second_key, self.min_key)
+ new_max_key = max(key, second_key, self.max_key)
+
+ if self.length() == 0:
+ # initialize bins
+ self.bins = [0.0] * self._get_new_length(new_min_key, new_max_key)
+ self.offset = new_min_key
+ self._adjust(new_min_key, new_max_key)
+
+ elif new_min_key >= self.min_key and new_max_key < self.offset + self.length():
+ # no need to change the range; just update min/max keys
+ self.min_key = new_min_key
+ self.max_key = new_max_key
+
+ else:
+ # grow the bins
+ new_length = self._get_new_length(new_min_key, new_max_key)
+ if new_length > self.length():
+ self.bins.extend([0.0] * (new_length - self.length()))
+ self._adjust(new_min_key, new_max_key)
+
+ def _adjust(self, new_min_key, new_max_key):
+ # type: (int, int) -> None
+ """Adjust the bins, the offset, the min_key, and max_key, without resizing the
+ bins, in order to try making it fit the specified range.
+ """
+ self._center_bins(new_min_key, new_max_key)
+ self.min_key = new_min_key
+ self.max_key = new_max_key
+
+ def _shift_bins(self, shift):
+ # type: (int) -> None
+ """Shift the bins; this changes the offset."""
+ if shift > 0:
+ self.bins = self.bins[:-shift]
+ self.bins[:0] = [0.0] * shift
+ else:
+ self.bins = self.bins[abs(shift) :]
+ self.bins.extend([0.0] * abs(shift))
+ self.offset -= shift
+
+ def _center_bins(self, new_min_key, new_max_key):
+ # type: (int, int) -> None
+ """Center the bins; this changes the offset."""
+ middle_key = new_min_key + (new_max_key - new_min_key + 1) // 2
+ self._shift_bins(self.offset + self.length() // 2 - middle_key)
+
+ def key_at_rank(self, rank, lower=True):
+ # type: (float, bool) -> int
+ running_ct = 0.0
+ for i, bin_ct in enumerate(self.bins):
+ running_ct += bin_ct
+ if (lower and running_ct > rank) or (not lower and running_ct >= rank + 1):
+ return i + self.offset
+
+ return self.max_key
+
+ def merge(self, store): # type: ignore[override]
+ # type: (DenseStore) -> None
+ if store.count == 0:
+ return
+
+ if self.count == 0:
+ self.copy(store)
+ return
+
+ if store.min_key < self.min_key or store.max_key > self.max_key:
+ self._extend_range(store.min_key, store.max_key)
+
+ for key in range(store.min_key, store.max_key + 1):
+ self.bins[key - self.offset] += store.bins[key - store.offset]
+
+ self.count += store.count
+
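
A concrete sketch of the key_at_rank semantics documented on the abstract Store, exercised through the DenseStore above:

store = DenseStore()
store.add(4)   # one count in bin key=4
store.add(7)   # one count in bin key=7

assert store.key_at_rank(0) == 4               # rank in [0, 1)
assert store.key_at_rank(1) == 7               # rank in [1, 2)
assert store.key_at_rank(0, lower=False) == 4  # rank in (-1, 0]
assert store.key_at_rank(1, lower=False) == 7  # rank in (0, 1]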
+
+class CollapsingLowestDenseStore(DenseStore):
+ """A dense store that keeps all the bins between the bin for the min_key and the
+ bin for the max_key, but collapsing the left-most bins if the number of bins
+ exceeds the bin_limit
+
+ Args:
+ bin_limit (int): the maximum number of bins
+ chunk_size (int, optional): the number of bins to grow by
+
+ Attributes:
+        count (float): the sum of the counts for the bins
+        min_key (int): the minimum key bin
+        max_key (int): the maximum key bin
+        offset (int): the difference between the keys and the indices at which they are stored
+        bins (List[float]): the bins
+ """
+
+ def __init__(self, bin_limit, chunk_size=CHUNK_SIZE):
+ # type: (int, int) -> None
+ super(CollapsingLowestDenseStore, self).__init__()
+ self.bin_limit = bin_limit
+ self.is_collapsed = False
+
+ def copy(self, store): # type: ignore[override]
+ # type: (CollapsingLowestDenseStore) -> None
+ self.bin_limit = store.bin_limit
+ self.is_collapsed = store.is_collapsed
+ super(CollapsingLowestDenseStore, self).copy(store)
+
+ def _get_new_length(self, new_min_key, new_max_key):
+ # type: (int, int) -> int
+ desired_length = new_max_key - new_min_key + 1
+ return min(
+ self.chunk_size * int(math.ceil(desired_length / self.chunk_size)),
+ self.bin_limit,
+ )
+
+ def _get_index(self, key):
+ # type: (int) -> int
+ """Calculate the bin index for the key, extending the range if necessary."""
+ if key < self.min_key:
+ if self.is_collapsed:
+ return 0
+
+ self._extend_range(key)
+ if self.is_collapsed:
+ return 0
+ elif key > self.max_key:
+ self._extend_range(key)
+
+ return key - self.offset
+
+ def _adjust(self, new_min_key, new_max_key):
+ # type: (int, int) -> None
+ """Override. Adjust the bins, the offset, the min_key, and max_key, without
+ resizing the bins, in order to try making it fit the specified
+ range. Collapse to the left if necessary.
+ """
+ if new_max_key - new_min_key + 1 > self.length():
+            # The range of keys is too wide; the lowest bins need to be collapsed.
+ new_min_key = new_max_key - self.length() + 1
+
+ if new_min_key >= self.max_key:
+ # put everything in the first bin
+ self.offset = new_min_key
+ self.min_key = new_min_key
+ self.bins[:] = [0.0] * self.length()
+ self.bins[0] = self.count
+ else:
+ shift = self.offset - new_min_key
+ if shift < 0:
+ collapse_start_index = self.min_key - self.offset
+ collapse_end_index = new_min_key - self.offset
+ collapsed_count = sum(
+ self.bins[collapse_start_index:collapse_end_index]
+ )
+ self.bins[collapse_start_index:collapse_end_index] = [0.0] * (
+ new_min_key - self.min_key
+ )
+ self.bins[collapse_end_index] += collapsed_count
+ self.min_key = new_min_key
+ # shift the buckets to make room for new_max_key
+ self._shift_bins(shift)
+ else:
+ self.min_key = new_min_key
+ # shift the buckets to make room for new_min_key
+ self._shift_bins(shift)
+
+ self.max_key = new_max_key
+ self.is_collapsed = True
+ else:
+ self._center_bins(new_min_key, new_max_key)
+ self.min_key = new_min_key
+ self.max_key = new_max_key
+
+ def merge(self, store): # type: ignore[override]
+ # type: (CollapsingLowestDenseStore) -> None # type: ignore[override]
+ """Override."""
+ if store.count == 0:
+ return
+
+ if self.count == 0:
+ self.copy(store)
+ return
+
+ if store.min_key < self.min_key or store.max_key > self.max_key:
+ self._extend_range(store.min_key, store.max_key)
+
+ collapse_start_idx = store.min_key - store.offset
+ collapse_end_idx = min(self.min_key, store.max_key + 1) - store.offset
+ if collapse_end_idx > collapse_start_idx:
+ collapse_count = sum(store.bins[collapse_start_idx:collapse_end_idx])
+ self.bins[0] += collapse_count
+ else:
+ collapse_end_idx = collapse_start_idx
+
+ for key in range(collapse_end_idx + store.offset, store.max_key + 1):
+ self.bins[key - self.offset] += store.bins[key - store.offset]
+
+ self.count += store.count
+
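
The collapsing behaviour in one concrete sketch: once the key range outgrows bin_limit, the lowest keys are folded into the lowest retained bin instead of growing the store.

store = CollapsingLowestDenseStore(bin_limit=128)
store.add(0)
store.add(500)    # 0..500 needs 501 bins > 128, so the lowest bins collapse
assert store.is_collapsed
assert store.length() <= 128
store.add(-1000)  # below the retained range: folded into the lowest bin
assert store.count == 3.0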
+
+class CollapsingHighestDenseStore(DenseStore):
+ """A dense store that keeps all the bins between the bin for the min_key and the
+ bin for the max_key, but collapsing the right-most bins if the number of bins
+ exceeds the bin_limit
+
+ Args:
+ bin_limit (int): the maximum number of bins
+ chunk_size (int, optional): the number of bins to grow by
+
+ Attributes:
+        count (float): the sum of the counts for the bins
+        min_key (int): the minimum key bin
+        max_key (int): the maximum key bin
+        offset (int): the difference between the keys and the indices at which they are stored
+        bins (List[float]): the bins
+ """
+
+    def __init__(self, bin_limit, chunk_size=CHUNK_SIZE):
+        # type: (int, int) -> None
+        super(CollapsingHighestDenseStore, self).__init__()
+ self.bin_limit = bin_limit
+ self.is_collapsed = False
+
+ def copy(self, store): # type: ignore[override]
+ # type: (CollapsingHighestDenseStore) -> None
+ self.bin_limit = store.bin_limit
+ self.is_collapsed = store.is_collapsed
+ super(CollapsingHighestDenseStore, self).copy(store)
+
+ def _get_new_length(self, new_min_key, new_max_key):
+ # type: (int, int) -> int
+ desired_length = new_max_key - new_min_key + 1
+ # For some reason mypy can't infer that min(int, int) is an int, so cast it.
+ return int(
+ min(
+ self.chunk_size * int(math.ceil(desired_length / self.chunk_size)),
+ self.bin_limit,
+ )
+ )
+
+ def _get_index(self, key):
+ # type: (int) -> int
+ """Calculate the bin index for the key, extending the range if necessary"""
+ if key > self.max_key:
+ if self.is_collapsed:
+ return self.length() - 1
+
+ self._extend_range(key)
+ if self.is_collapsed:
+ return self.length() - 1
+ elif key < self.min_key:
+ self._extend_range(key)
+ return key - self.offset
+
+ def _adjust(self, new_min_key, new_max_key):
+ # type: (int, int) -> None
+ """Override. Adjust the bins, the offset, the min_key, and max_key, without
+ resizing the bins, in order to try making it fit the specified
+ range. Collapse to the left if necessary.
+ """
+ if new_max_key - new_min_key + 1 > self.length():
+ # The range of keys is too wide, the lowest bins need to be collapsed.
+ new_max_key = new_min_key + self.length() - 1
+
+ if new_max_key <= self.min_key:
+ # put everything in the last bin
+ self.offset = new_min_key
+ self.max_key = new_max_key
+ self.bins[:] = [0.0] * self.length()
+ self.bins[-1] = self.count
+ else:
+ shift = self.offset - new_min_key
+ if shift > 0:
+ collapse_start_index = new_max_key - self.offset + 1
+ collapse_end_index = self.max_key - self.offset + 1
+ collapsed_count = sum(
+ self.bins[collapse_start_index:collapse_end_index]
+ )
+ self.bins[collapse_start_index:collapse_end_index] = [0.0] * (
+ self.max_key - new_max_key
+ )
+ self.bins[collapse_start_index - 1] += collapsed_count
+ self.max_key = new_max_key
+                # shift the buckets to make room for new_min_key
+ self._shift_bins(shift)
+ else:
+ self.max_key = new_max_key
+ # shift the buckets to make room for new_min_key
+ self._shift_bins(shift)
+
+ self.min_key = new_min_key
+ self.is_collapsed = True
+ else:
+ self._center_bins(new_min_key, new_max_key)
+ self.min_key = new_min_key
+ self.max_key = new_max_key
+
+ def merge(self, store): # type: ignore[override]
+ # type: (CollapsingHighestDenseStore) -> None # type: ignore[override]
+ """Override."""
+ if store.count == 0:
+ return
+
+ if self.count == 0:
+ self.copy(store)
+ return
+
+ if store.min_key < self.min_key or store.max_key > self.max_key:
+ self._extend_range(store.min_key, store.max_key)
+
+ collapse_end_idx = store.max_key - store.offset + 1
+ collapse_start_idx = max(self.max_key + 1, store.min_key) - store.offset
+ if collapse_end_idx > collapse_start_idx:
+ collapse_count = sum(store.bins[collapse_start_idx:collapse_end_idx])
+ self.bins[-1] += collapse_count
+ else:
+ collapse_start_idx = collapse_end_idx
+
+ for key in range(store.min_key, collapse_start_idx + store.offset):
+ self.bins[key - self.offset] += store.bins[key - store.offset]
+
+ self.count += store.count
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/INSTALLER b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE
new file mode 100644
index 0000000..5f8fd63
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE
@@ -0,0 +1,6 @@
+## License
+
+This work is dual-licensed under Apache 2.0 or BSD3.
+You may select, at your option, one of the above-listed licenses.
+
+`SPDX-License-Identifier: Apache-2.0 OR BSD-3-Clause`
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE.Apache b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE.Apache
new file mode 100644
index 0000000..bff56b5
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE.Apache
@@ -0,0 +1,200 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "{}"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright 2016 Datadog, Inc.
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE.BSD3 b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE.BSD3
new file mode 100644
index 0000000..e8f3a81
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/LICENSE.BSD3
@@ -0,0 +1,24 @@
+Copyright (c) 2016, Datadog
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+ * Neither the name of Datadog nor the
+ names of its contributors may be used to endorse or promote products
+ derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL DATADOG BE LIABLE FOR ANY
+DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/METADATA b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/METADATA
new file mode 100644
index 0000000..fd46eae
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/METADATA
@@ -0,0 +1,68 @@
+Metadata-Version: 2.1
+Name: ddtrace
+Version: 2.6.5
+Summary: Datadog APM client library
+Home-page: https://github.com/DataDog/dd-trace-py
+Author: Datadog, Inc.
+Author-email: "Datadog, Inc."
+License: LICENSE.BSD3
+Project-URL: Bug Tracker, https://github.com/DataDog/dd-trace-py/issues
+Project-URL: Changelog, https://github.com/DataDog/dd-trace-py/releases
+Project-URL: Documentation, https://ddtrace.readthedocs.io/en/stable/
+Project-URL: Homepage, https://github.com/DataDog/dd-trace-py
+Project-URL: Source Code, https://github.com/DataDog/dd-trace-py/
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Requires-Python: >=3.7
+Description-Content-Type: text/markdown
+License-File: LICENSE
+License-File: LICENSE.Apache
+License-File: LICENSE.BSD3
+License-File: NOTICE
+Requires-Dist: attrs >=20
+Requires-Dist: cattrs
+Requires-Dist: ddsketch >=2.0.1
+Requires-Dist: envier
+Requires-Dist: opentelemetry-api >=1
+Requires-Dist: protobuf >=3
+Requires-Dist: six >=1.12.0
+Requires-Dist: typing-extensions
+Requires-Dist: xmltodict >=0.12
+Requires-Dist: importlib-metadata <=6.5.0 ; python_version < "3.8"
+Requires-Dist: bytecode ~=0.13.0 ; python_version == "3.7"
+Requires-Dist: setuptools ; python_version >= "3.12"
+Requires-Dist: bytecode ; python_version >= "3.8"
+Provides-Extra: opentracing
+Requires-Dist: opentracing >=2.0.0 ; extra == 'opentracing'
+
+# `ddtrace`
+
+[![CircleCI](https://circleci.com/gh/DataDog/dd-trace-py/tree/main.svg?style=svg)](https://circleci.com/gh/DataDog/dd-trace-py/tree/main)
+[![PypiVersions](https://img.shields.io/pypi/v/ddtrace.svg)](https://pypi.org/project/ddtrace/)
+[![Pyversions](https://img.shields.io/pypi/pyversions/ddtrace.svg?style=flat)](https://pypi.org/project/ddtrace/)
+
+
+
+This library powers [Distributed Tracing](https://docs.datadoghq.com/tracing/),
+ [Continuous Profiling](https://docs.datadoghq.com/tracing/profiler/),
+ [Error Tracking](https://docs.datadoghq.com/tracing/error_tracking/),
+ [Continuous Integration Visibility](https://docs.datadoghq.com/continuous_integration/),
+ [Deployment Tracking](https://docs.datadoghq.com/tracing/deployment_tracking/),
+ [Code Hotspots](https://docs.datadoghq.com/tracing/profiler/connect_traces_and_profiles/),
+ [Dynamic Instrumentation](https://docs.datadoghq.com/dynamic_instrumentation/),
+ and more.
+
+To get started with tracing, check out the [product documentation][setup docs] or the [glossary][visualization docs].
+
+For advanced usage and configuration information, check out the [library documentation][api docs].
+
+To get started as a contributor, see [the contributing docs](https://ddtrace.readthedocs.io/en/stable/contributing.html) first.
+
+[setup docs]: https://docs.datadoghq.com/tracing/setup/python/
+[api docs]: https://ddtrace.readthedocs.io/
+[visualization docs]: https://docs.datadoghq.com/tracing/visualization/
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/NOTICE b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/NOTICE
new file mode 100644
index 0000000..732c748
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/NOTICE
@@ -0,0 +1,4 @@
+Datadog dd-trace-py
+Copyright 2016-Present Datadog, Inc.
+
+This product includes software developed at Datadog, Inc. (https://www.datadoghq.com/).
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/RECORD b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/RECORD
new file mode 100644
index 0000000..57017c5
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/RECORD
@@ -0,0 +1,1391 @@
+../../bin/ddtrace-run,sha256=-2hcsI92sxn_Du4qKKiIz_MEFpezi6TBsB7AVMn47U8,236
+ddtrace-2.6.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+ddtrace-2.6.5.dist-info/LICENSE,sha256=OZvn-IQ0kjk6HmSP8Yi2WQqZBzGRJQmvBO5w9KBdD3o,186
+ddtrace-2.6.5.dist-info/LICENSE.Apache,sha256=5V2RruBHZQIcPyceiv51DjjvdvhgsgS4pnXAOHDuZkQ,11342
+ddtrace-2.6.5.dist-info/LICENSE.BSD3,sha256=J9S_Tq-hhvteDV2W8R0rqht5DZHkmvgdx3gnLZg4j6Q,1493
+ddtrace-2.6.5.dist-info/METADATA,sha256=tGHBBqBIyOkA1eJ3mPq8FY6G12qN2-hl-Ef-axiZK0w,3287
+ddtrace-2.6.5.dist-info/NOTICE,sha256=Wmf6iXVNfb58zWLK5pIkcbqMflb7pl38JhxjMwmjtyc,146
+ddtrace-2.6.5.dist-info/RECORD,,
+ddtrace-2.6.5.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace-2.6.5.dist-info/WHEEL,sha256=AI1yqBLEPcVKWn5Ls2uPawjbqPXPFTYdQLSdN8WFCJw,152
+ddtrace-2.6.5.dist-info/entry_points.txt,sha256=1t-yacpd7hsx2aKKB7_O34U414M5pPnqp-tu0XmflB8,337
+ddtrace-2.6.5.dist-info/top_level.txt,sha256=jPd7qTCAnWevz7DZiI0jdVlnFB3cautvluLsO-iMgQY,8
+ddtrace/__init__.py,sha256=bBRAg-TKVYa9IaNrop8whGGNAGY4PSofcN_FxgrIeHY,1754
+ddtrace/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/__pycache__/_hooks.cpython-311.pyc,,
+ddtrace/__pycache__/_logger.cpython-311.pyc,,
+ddtrace/__pycache__/_monkey.cpython-311.pyc,,
+ddtrace/__pycache__/_version.cpython-311.pyc,,
+ddtrace/__pycache__/auto.cpython-311.pyc,,
+ddtrace/__pycache__/constants.cpython-311.pyc,,
+ddtrace/__pycache__/context.cpython-311.pyc,,
+ddtrace/__pycache__/data_streams.cpython-311.pyc,,
+ddtrace/__pycache__/filters.cpython-311.pyc,,
+ddtrace/__pycache__/pin.cpython-311.pyc,,
+ddtrace/__pycache__/provider.cpython-311.pyc,,
+ddtrace/__pycache__/sampler.cpython-311.pyc,,
+ddtrace/__pycache__/sampling_rule.cpython-311.pyc,,
+ddtrace/__pycache__/span.cpython-311.pyc,,
+ddtrace/__pycache__/tracer.cpython-311.pyc,,
+ddtrace/__pycache__/version.cpython-311.pyc,,
+ddtrace/_hooks.py,sha256=VW8lblk-yD8rBtvU7dPetjAfaeEtUVDuJCrGbPEnwMI,3865
+ddtrace/_logger.py,sha256=T55nFe_TYEfQK0NmTb513ux6yurm5s1qAWc3j-ZIA8g,3658
+ddtrace/_monkey.py,sha256=AC5dmFiLBVrA2Hl3ZJmfhSNOcGQMsX7aUFKfYBv_o3M,8905
+ddtrace/_trace/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/_trace/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/_trace/__pycache__/_limits.cpython-311.pyc,,
+ddtrace/_trace/_limits.py,sha256=L2N27bqVnpOn2T53HDn-0rD43ek7GNuyS6piZgP-2tM,92
+ddtrace/_version.py,sha256=D_wLNXauKIfiJpn1EENjWxSq66DO-yHjqpgfDoH3wzY,411
+ddtrace/appsec/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/appsec/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/appsec/__pycache__/_asm_request_context.cpython-311.pyc,,
+ddtrace/appsec/__pycache__/_capabilities.cpython-311.pyc,,
+ddtrace/appsec/__pycache__/_constants.cpython-311.pyc,,
+ddtrace/appsec/__pycache__/_deduplications.cpython-311.pyc,,
+ddtrace/appsec/__pycache__/_handlers.cpython-311.pyc,,
+ddtrace/appsec/__pycache__/_metrics.cpython-311.pyc,,
+ddtrace/appsec/__pycache__/_processor.cpython-311.pyc,,
+ddtrace/appsec/__pycache__/_remoteconfiguration.cpython-311.pyc,,
+ddtrace/appsec/__pycache__/_trace_utils.cpython-311.pyc,,
+ddtrace/appsec/__pycache__/_utils.cpython-311.pyc,,
+ddtrace/appsec/_api_security/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/appsec/_api_security/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/appsec/_api_security/__pycache__/api_manager.cpython-311.pyc,,
+ddtrace/appsec/_api_security/api_manager.py,sha256=g8L3zogrDhJQGgdxULXdigviGnmzD1pEF2Oc9yVLQqQ,6217
+ddtrace/appsec/_asm_request_context.py,sha256=K4MArRjpn9AI0g5563dd2tQyu2V-FhkdELspeTtm7Wo,18228
+ddtrace/appsec/_capabilities.py,sha256=NL1qG7JMitdrEF53kkUKiXLMyvDOCIQ0Jp9s3KsUo38,2513
+ddtrace/appsec/_constants.py,sha256=pUaW4ys3NpzgbZa_GB8gOBD8Zl6lUlpNHMA8DzrLQ8s,8237
+ddtrace/appsec/_ddwaf/__init__.py,sha256=zQKZHtgw2QQv3SQ_5gGgV2z79T4C1KSqOFkCgojS6vQ,7685
+ddtrace/appsec/_ddwaf/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/appsec/_ddwaf/__pycache__/ddwaf_types.cpython-311.pyc,,
+ddtrace/appsec/_ddwaf/ddwaf_types.py,sha256=rCTezCO2D7dskFQ14_2Q7VodGBgjaOEWjGY8jZSkrY0,16417
+ddtrace/appsec/_ddwaf/libddwaf/x86_64/lib/libddwaf.so,sha256=N_HdHd9oTea4Ep9fyse9AUgfHoddwt9OKJfo5iir2Oc,2195976
+ddtrace/appsec/_deduplications.py,sha256=balBc-3vYEsb9qsajcMTqZ0TYM8eaAyygDymuUTQJTA,1096
+ddtrace/appsec/_handlers.py,sha256=Z9y5pNYejwxbAEwHA1cmRwONHKgoUzJEJvmfdIk4bNU,14559
+ddtrace/appsec/_iast/__init__.py,sha256=115d3innxqsf_N85LwpM47F_rBlBENHipfV4Rh1u_DM,2528
+ddtrace/appsec/_iast/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/appsec/_iast/__pycache__/_input_info.cpython-311.pyc,,
+ddtrace/appsec/_iast/__pycache__/_loader.cpython-311.pyc,,
+ddtrace/appsec/_iast/__pycache__/_metrics.cpython-311.pyc,,
+ddtrace/appsec/_iast/__pycache__/_overhead_control_engine.cpython-311.pyc,,
+ddtrace/appsec/_iast/__pycache__/_patch.cpython-311.pyc,,
+ddtrace/appsec/_iast/__pycache__/_patch_modules.cpython-311.pyc,,
+ddtrace/appsec/_iast/__pycache__/_taint_dict.cpython-311.pyc,,
+ddtrace/appsec/_iast/__pycache__/_taint_utils.cpython-311.pyc,,
+ddtrace/appsec/_iast/__pycache__/_utils.cpython-311.pyc,,
+ddtrace/appsec/_iast/__pycache__/constants.cpython-311.pyc,,
+ddtrace/appsec/_iast/__pycache__/processor.cpython-311.pyc,,
+ddtrace/appsec/_iast/__pycache__/reporter.cpython-311.pyc,,
+ddtrace/appsec/_iast/_ast/__init__.py,sha256=1oLL20yLB1GL9IbFiZD8OReDqiCpFr-yetIR6x1cNkI,23
+ddtrace/appsec/_iast/_ast/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/appsec/_iast/_ast/__pycache__/ast_patching.cpython-311.pyc,,
+ddtrace/appsec/_iast/_ast/__pycache__/visitor.cpython-311.pyc,,
+ddtrace/appsec/_iast/_ast/ast_patching.py,sha256=EiFFgzy-ICs9EWYDPRRUrYezp_X-HNpwyijhq0kNQxk,5319
+ddtrace/appsec/_iast/_ast/visitor.py,sha256=i4rF8ijsM2Rp57zkBkcG-fB0aEH_GRuRuRtZyUgd_9g,31555
+ddtrace/appsec/_iast/_input_info.py,sha256=fd2GRmY1bsjrm3jFXu_dgwerOlKWO3B785FG-XhAgxQ,447
+ddtrace/appsec/_iast/_loader.py,sha256=48dE2QjNYEwYPXiy4BTtm83ucDuDYAkklFTGs55ZyCw,863
+ddtrace/appsec/_iast/_metrics.py,sha256=B6gK0mDidUAk3jfOC0CuI2Apk2yuIh-r9eI_FlC3qhA,5336
+ddtrace/appsec/_iast/_overhead_control_engine.py,sha256=BI0oXr3XLRWW4Pds5B57LmXiOOKpiM6y_Y89l4H6ZLs,4451
+ddtrace/appsec/_iast/_patch.py,sha256=oVKBEfsTpPSIb0sLEalQppf4ccXQmPNKQCiko713INQ,5764
+ddtrace/appsec/_iast/_patch_modules.py,sha256=v-C0yqWrEpr_NpRry8jufU2ZCD2Kyqa8F0SYgQV_Ff8,789
+ddtrace/appsec/_iast/_patches/__pycache__/json_tainting.cpython-311.pyc,,
+ddtrace/appsec/_iast/_patches/json_tainting.py,sha256=BS4KA9M5q5gozp0A3nRwQkbh6cM09PAgWvxP4flPmWA,2881
+ddtrace/appsec/_iast/_stacktrace.c,sha256=c-iYHfUnsbJQhIvpYVMJP1sJYOlGgbW1gicnjyYI0Tg,4745
+ddtrace/appsec/_iast/_stacktrace.cpython-311-x86_64-linux-gnu.so,sha256=fxKX5mOHeE5nSZyInZXStAuTrO-PuH_Zzn9f_CwPq6I,16976
+ddtrace/appsec/_iast/_taint_dict.py,sha256=aX-Orr9g0HkASLmEBg57z8oLP9NBzo6C6FEpkHSfVHE,522
+ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectExtend.cpp,sha256=lTYdMHMeKdke5vadPDMg9dtGMLwcwnb2r1F1P1dorxs,1310
+ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectExtend.h,sha256=SiBtZrE70Jl2Slk4rPyyxQC7OgPDU5SRNrbNDIs_iwQ,157
+ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectFormat.cpp,sha256=ALl3sNgE9DWte8NAWMBTMObUEOM_6xJO4wGKl12Jhkk,2095
+ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectFormat.h,sha256=BWDsHh9McH3GdvzjA8ljNDXb-uHQ7a3EdsPpGlBNjGw,448
+ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectIndex.cpp,sha256=AS1_-KYkTbSceURhDJNKy1AOvsZ_n0HAsHsy4Fasw90,1282
+ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectIndex.h,sha256=g58F_438ciqqhKtAY4yqEheDZJgv93lBCCZ1tCgpgFo,281
+ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectJoin.cpp,sha256=NyoC_13HIJtS9ERrg9wCwCtL7SmDzvQBk_EmtthoSHA,7175
+ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectJoin.h,sha256=Nm7Zqs_LhHyttysjV9wu86UME3a_dgEMa6le6k-xRFs,240
+ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectOperatorAdd.cpp,sha256=Q11TfgGUnOO5D3n7W5AzED7cru1BmfeYbvSAcUiesgM,4129
+ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectOperatorAdd.h,sha256=bmkqJlXgpphoOqBTOh098MWmjkp-kDDPRDMjyJwWtbg,213
+ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectSlice.cpp,sha256=oC7zPY2n_3Nmz8GRglXY27Pz543FRXvhCb5oNvyjsZY,3853
+ddtrace/appsec/_iast/_taint_tracking/Aspects/AspectSlice.h,sha256=xBy5qunF4kNnBh1yQEJUNkHPZGx3q3yHC55OP1QKClc,163
+ddtrace/appsec/_iast/_taint_tracking/Aspects/Helpers.cpp,sha256=7158WBwsJioeb52e_InpKzkYhYd07MgYQSm4OYO7Hy4,12777
+ddtrace/appsec/_iast/_taint_tracking/Aspects/Helpers.h,sha256=PxvMWJPKAv52IhrgY2fNjWDd2n6NmHV7pdfcVaU7FZM,1834
+ddtrace/appsec/_iast/_taint_tracking/Aspects/_aspects_exports.h,sha256=W9eoyYf-sNInZU1bVn23UcQ0E-Ghut8KMTANmkXH0Bk,412
+ddtrace/appsec/_iast/_taint_tracking/CMakeLists.txt,sha256=SlR7CmiJ9ui_KtFvVLRBHg4pCrf7_6IGPxiHFNoOJjY,2305
+ddtrace/appsec/_iast/_taint_tracking/Constants.h,sha256=8pRUHCymtbzoIFKGdLnmCKDPzLY9BM6ombe0p0LLRGo,134
+ddtrace/appsec/_iast/_taint_tracking/Initializer/Initializer.cpp,sha256=81vx8MXRYX2XM3OLEuaiaCSHiXjbq0YxEpyy46s0wGM,6471
+ddtrace/appsec/_iast/_taint_tracking/Initializer/Initializer.h,sha256=M1TmaVeUl0ZCH3pQ8SEI69q7K-6vwHZi811TQLxWIxs,4548
+ddtrace/appsec/_iast/_taint_tracking/Initializer/_initializer.h,sha256=LwQAUIDDAx-Yk7_k1O6hoRGek6VoAqnMPjzYM-bV_8E,307
+ddtrace/appsec/_iast/_taint_tracking/README.txt,sha256=TLWoDrzE3qfLe88EvUQkJthIRrWYN6JG9oXTw7YDZqs,821
+ddtrace/appsec/_iast/_taint_tracking/TaintTracking/Source.cpp,sha256=rC9Eg4StydtdxwAJNaNs1CQAwi8M_WHxzlZ_3yd1sXM,2847
+ddtrace/appsec/_iast/_taint_tracking/TaintTracking/Source.h,sha256=A5BgphieMkFVAn3G8QBv5E7sV5kJ5CMI2OZbRuSuiFk,3347
+ddtrace/appsec/_iast/_taint_tracking/TaintTracking/TaintRange.cpp,sha256=3k88GA3ONPvp3cR40ARkkprmqX-b3TdrLOSEjMit6Tw,13939
+ddtrace/appsec/_iast/_taint_tracking/TaintTracking/TaintRange.h,sha256=78w6fhKoIvLf4sak3UGgTEMvCHNFBM4eG7H_3182Ewg,3983
+ddtrace/appsec/_iast/_taint_tracking/TaintTracking/TaintedObject.cpp,sha256=V_u-Z3mFcwLR2nZJRElVjZMomGjJjf01ZICQNI9H-JQ,4014
+ddtrace/appsec/_iast/_taint_tracking/TaintTracking/TaintedObject.h,sha256=yMKiLV9RupcamBpHbePvGRmpO8lGMYwBbGzsjMasPRU,1615
+ddtrace/appsec/_iast/_taint_tracking/TaintTracking/_taint_tracking.h,sha256=EVO6qgu6MyZtxsTWiomXsOpl_NaxLTT-299jwo9lpxE,474
+ddtrace/appsec/_iast/_taint_tracking/TaintedOps/TaintedOps.cpp,sha256=_1E1VqgfvXXoTrpw__J6uch8na5RuRTJvpZf9f6aZq0,1152
+ddtrace/appsec/_iast/_taint_tracking/TaintedOps/TaintedOps.h,sha256=UsV-E2mlzpGNELnbgJ1SY07CJz3BbziufhC_p2JtVOk,1032
+ddtrace/appsec/_iast/_taint_tracking/Utils/StringUtils.cpp,sha256=sKtcQp_RI8Ayi8U4m6JX-UlDFGeShvV8cprvSPVtO94,4290
+ddtrace/appsec/_iast/_taint_tracking/Utils/StringUtils.h,sha256=jPiwMj7Lo3T4vnaTlQL8Y8WESVaHvbno0lxQ5LP9flk,684
+ddtrace/appsec/_iast/_taint_tracking/__init__.py,sha256=V2yjQL_pdVkwyQOmRFJk3U-5stEKWy_YKJCMGSbMzzY,5048
+ddtrace/appsec/_iast/_taint_tracking/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/appsec/_iast/_taint_tracking/__pycache__/aspects.cpython-311.pyc,,
+ddtrace/appsec/_iast/_taint_tracking/_native.cpp,sha256=7gFaQS6h_dw53_AGvbmnwJWDhq5R33DohTzPfNT5WCA,3611
+ddtrace/appsec/_iast/_taint_tracking/_native.cpython-311-x86_64-linux-gnu.so,sha256=-miouZYVKFSgF-pfMLVgoFGT9c26WLvUTmX081jgY-Y,633224
+ddtrace/appsec/_iast/_taint_tracking/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/appsec/_iast/_taint_tracking/_vendor/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/CMakeLists.txt,sha256=X4_B7XO6EzkqC94TVFELCWz_CS0kxMJIo31_ohjS_gY,12067
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/attr.h,sha256=QPjH7BfhL8QFwHHkrDak8gNOLMlb1itAO5fobjdoLp8,24334
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/buffer_info.h,sha256=m_VE_hfWPKl-KgUZy9aVQdPg1xtoaDaBgkurIX7aGig,7750
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/cast.h,sha256=Rcq-l1HCfMIyBxbJSM041wpu3EpZBVChfgVdEpdnqC0,67312
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/chrono.h,sha256=A23naeloqn-1NKVAABOsJtHU9Vz8lfvrAICuLk-7qBM,8458
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/common.h,sha256=ATg9Bt1pwF8qnNuI086fprM4CUTdrZdk_g2HXE1Sf6A,120
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/complex.h,sha256=AaDZ-rEmK4tFaue-K9P5y3TxxnaQF6JwZ_6LAzkdLQI,2096
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/detail/class.h,sha256=Y2IzTplhE5KiMiBlzWSAovCQaI_1M0dlsoDYCTpB5Hg,28518
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/detail/common.h,sha256=rACKWPmqUkdizSQMftuCkLCZBXGbVSiuRv_h0uaY-k4,53480
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/detail/descr.h,sha256=k1nvytx1zhMh8ERL2xS8Unbxcio5fa7eZIqnTsZ0orE,5962
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/detail/init.h,sha256=vYO2nScstnYiCn4Kh57IKrOwpNkQ9gqME4foF03JU6A,17859
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/detail/internals.h,sha256=Az8HDKl3QU-KEOapdwHYifjWpwbej4Js5wKvSspAjQk,28221
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/detail/type_caster_base.h,sha256=LC91ejtvXjGQ0DaUFrYN3ChE1agf9Y2hHDs7byTbZa8,48364
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/detail/typeid.h,sha256=jw5pr9m72vkDsloT8vxl9wj17VJGcEdXDyziBlt89Js,1625
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/eigen.h,sha256=-HmSA1kgwCQ-GHUt7PHtTEc-vxqw9xARpF8PHWJip28,316
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/eigen/common.h,sha256=dIeqmK7IzW5K4k2larPnA1A863rDp38U9YbNIwiIyYk,378
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/eigen/matrix.h,sha256=CS8NpkZI8Y8ty0NFQC7GZcUlM5o8_1Abv1GbGltsbkA,32135
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/eigen/tensor.h,sha256=jLtNZKXr7MWFplt9x3qnHdO73jNZlAqT40Hb4FPabnk,18442
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/embed.h,sha256=xD-oEg56PadTig9a8FOcMgbsL64jaie7hwG3y6DWPEI,13459
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/eval.h,sha256=7re-O2Eor1yD0Q_KgFkHIjKD17ejzII687Yszl9_KfE,4731
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/functional.h,sha256=cXDJUS0Y_1GBbOK4Nn13exhkZsAQWx408HZ-PFBmbJo,5002
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/gil.h,sha256=RZkkMm0E9PQlHXW6xkBIhM7VBeCvmyJlPVQNaSJMUQQ,8262
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/iostream.h,sha256=K5rPXoCYN325r1PptcJCIhPhgtRtTJQjMr7bvUIOwxk,8862
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/numpy.h,sha256=G-hxJJom5roJ7s_hTiG1Mq9NxpZ6BOzK03fLXUQuH30,79725
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/operators.h,sha256=224RoAXcv1la4NNY9rQ3aD_AeC8S9ZKx3HVK1O8B4MU,9103
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/options.h,sha256=qXvmnj--9fZSp56NYefnB3W5V17ppHlY1Srgo3DNBpw,2734
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/pybind11.h,sha256=V1zKPVpON-t2yGVQigySYMALadMx-ux7eZ_xC0ILg9c,126706
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/pytypes.h,sha256=mshHowCgq91Dt06atf5C6DFhRSWbUwYBgIl21-2usco,98455
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/stl.h,sha256=dajIEYNnTimX5uYYLm0TzYesxq87JakWZ5KWCzbET2I,15477
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/stl/filesystem.h,sha256=refLexmdcqOM6Qjo9QMB6heA5bQ7GZrP6DCvVBv0R1M,4185
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/stl_bind.h,sha256=835YP_4OkcKTkNOaY-GUUXIDf86GSpN65lVivP0M4TY,29897
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/include/pybind11/type_caster_pyobject_ptr.h,sha256=H7pKBYTvUlibiJQEcKmeAkygSQwoCkuIyukNSDmVq-U,1929
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__init__.py,sha256=4-WhH9Ac6P8D_FqnflpOch8XlaZrkXbe95FspbMvwu0,429
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__main__.py,sha256=ATLlhFlhBxDXxxXEfnf2F1RcRhuWN1ziMwbmrGuhif0,1544
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__pycache__/__main__.cpython-311.pyc,,
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__pycache__/_version.cpython-311.pyc,,
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__pycache__/commands.cpython-311.pyc,,
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__pycache__/setup_helpers.cpython-311.pyc,,
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/_version.py,sha256=wUJj-zKbescYMsNA17iZHUMgzy99ahCucWDV4KMhPCg,228
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/commands.py,sha256=iJBFWhXHqlC_JMAgMjMIn6H_hizvorS572upGU1roGA,1207
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/setup_helpers.py,sha256=CSDjuh2T5zlcZHqII8tO7HZcz8-qmMlUfCo23_WESaw,17475
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindCatch.cmake,sha256=7D4GwE3lgw_0y-NMGqkGS9aTEXFteGbj3ZgXlXr3g2A,2449
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindEigen3.cmake,sha256=liSnYcMw1gAxI-AZGVS0CJJsOQ2bGcDcG3LbCR5sta8,3105
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindPythonLibsNew.cmake,sha256=1dEMOUQxj-xTQzlQLBBXMT4DTScNJFquOQGAWiJawCA,11190
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/JoinPaths.cmake,sha256=eUsNj6YqO3mMffEtUBFFgNGkeiNL-2tNgwkutkam7MQ,817
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/__pycache__/codespell_ignore_lines_from_errors.cpython-311.pyc,,
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/__pycache__/libsize.cpython-311.pyc,,
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/__pycache__/make_changelog.cpython-311.pyc,,
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/check-style.sh,sha256=TigulaRClaWcR-fjZLt0PtH9oncUdvsDnOxFyp9CjX8,1423
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/cmake_uninstall.cmake.in,sha256=39wpRXJHm-hEzGGkil1HbBFPITu9JOzV5xVt46JLcBE,952
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/codespell_ignore_lines_from_errors.py,sha256=bTs7QS1-reWL04cS6C-Fh4F-TTXBgLIhemO4gfRaIgo,1117
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/libsize.py,sha256=3MBZDCi0-kdKei_6RcTbmVJgtmT4udB-WIF-mOPLBD4,1031
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/make_changelog.py,sha256=W1WAntnxxI8vWp6JPikaiY6FToN4vpYcXFBSJhP7ZdM,1311
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11.pc.in,sha256=O_qrWegwZzC4WaSJ5pCnoeTCRt6-z1KRrb0gElWoBYo,196
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Common.cmake,sha256=JXy2aActygFERWyazURxTk0jW1MTcYtvaOL_8KFjNWw,14449
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Config.cmake.in,sha256=AkLEzM2gv7T49m5w32CnB1ez6uxX9P2_fUdypNc_MPo,7101
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11NewTools.cmake,sha256=2mZ2ZUkFMsDk2JQU5hRwIjamCsljarTQu8CM8mY7P8A,8960
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Tools.cmake,sha256=kRoc1SvQl6NRZEHHuM-NDrfyMJF1HbgL-TiTu__Dkw8,8361
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pyproject.toml,sha256=JPALGLInEk7BiomriJ9xCKQW6vzO82rAvFSn1125GMA,94
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/setup_global.py.in,sha256=01t6jThpKlPyD3SJGWPj1aiejGAzGtzlm_XjRY4fsHM,2104
+ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/setup_main.py.in,sha256=XNB-0uhzvP6MP0mePkM0oWcIPpjBdwgVCeseEhTvX34,1234
+ddtrace/appsec/_iast/_taint_tracking/aspects.py,sha256=JCVHjDKVFmlnkOYOhPj6ijzDUhr299kUlF4eCiO6bjA,34809
+ddtrace/appsec/_iast/_taint_tracking/clean.sh,sha256=RF6OT4iCwc19nMNWVTK_Ka6GweEqc4Cucz_2BLsZ7-k,231
+ddtrace/appsec/_iast/_taint_utils.py,sha256=98Zd6pFR-bpWkTntbBOS_fph_EjJJY55C6KO36ldzO8,18317
+ddtrace/appsec/_iast/_utils.py,sha256=2r3bykNHgeooPO1rWL5at0K38QFq0UysF_pb5x_WOoc,3478
+ddtrace/appsec/_iast/constants.py,sha256=Z8nH-Y1aUd7OB633ehFGfESr5mcoLZnGv2mwb0jcCw4,1907
+ddtrace/appsec/_iast/processor.py,sha256=MVPSNBuE0eKZnCMNPLaDbxcMNe26Vzeh4WcwyruDaoo,2921
+ddtrace/appsec/_iast/reporter.py,sha256=ERwuOWSP8wWrauRgSbtoerRxmM-IVqoh6je3fB1nloc,2811
+ddtrace/appsec/_iast/taint_sinks/__init__.py,sha256=xDTH7yfOCdm84VRsHDvmCJSTFwY-FXxbFVs1pPjxXeQ,147
+ddtrace/appsec/_iast/taint_sinks/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/appsec/_iast/taint_sinks/__pycache__/_base.cpython-311.pyc,,
+ddtrace/appsec/_iast/taint_sinks/__pycache__/ast_taint.cpython-311.pyc,,
+ddtrace/appsec/_iast/taint_sinks/__pycache__/command_injection.cpython-311.pyc,,
+ddtrace/appsec/_iast/taint_sinks/__pycache__/insecure_cookie.cpython-311.pyc,,
+ddtrace/appsec/_iast/taint_sinks/__pycache__/path_traversal.cpython-311.pyc,,
+ddtrace/appsec/_iast/taint_sinks/__pycache__/sql_injection.cpython-311.pyc,,
+ddtrace/appsec/_iast/taint_sinks/__pycache__/ssrf.cpython-311.pyc,,
+ddtrace/appsec/_iast/taint_sinks/__pycache__/weak_cipher.cpython-311.pyc,,
+ddtrace/appsec/_iast/taint_sinks/__pycache__/weak_hash.cpython-311.pyc,,
+ddtrace/appsec/_iast/taint_sinks/__pycache__/weak_randomness.cpython-311.pyc,,
+ddtrace/appsec/_iast/taint_sinks/_base.py,sha256=EDZseIRIo_Or3YhVw6ACs42r8Ro-QtKLQh1tiTLvcoI,12126
+ddtrace/appsec/_iast/taint_sinks/ast_taint.py,sha256=amvb3i-w_Wln0c_jLICoS7GRTxegunYFFGyIaN98uTY,1723
+ddtrace/appsec/_iast/taint_sinks/command_injection.py,sha256=v0W6TbctjKz_0Rru3j1ROvT8IiSR9fjC5Z14R81YPA8,9623
+ddtrace/appsec/_iast/taint_sinks/insecure_cookie.py,sha256=Rg2lp-JQUMT7QwDrrcScBph4Upj-MsS_JcE9Yajx59g,2545
+ddtrace/appsec/_iast/taint_sinks/path_traversal.py,sha256=eODEuVCuA0koBJynvQSxXTWxYXPlB8a2taeqlaCFzKo,2456
+ddtrace/appsec/_iast/taint_sinks/sql_injection.py,sha256=EZIl8U4DeWEYL9PpO76mQzjIAmTpR_Th4FumjXyvesE,1503
+ddtrace/appsec/_iast/taint_sinks/ssrf.py,sha256=FoQivFkvcHGCUHEdlnMpUIjitGIAq1_2vPaoh1ds6Wg,7558
+ddtrace/appsec/_iast/taint_sinks/weak_cipher.py,sha256=XpCBpZOGKNdls7fNS3N684rWQ5Q9SEESv3pTQu-0JYw,6302
+ddtrace/appsec/_iast/taint_sinks/weak_hash.py,sha256=CnxlkN_Yar52lZyJtvpjrFFsyFzjaJfveSoi3zf-B3c,6694
+ddtrace/appsec/_iast/taint_sinks/weak_randomness.py,sha256=28UXtKlDYspbXMlj4raZF-l_oFoTn5YKSMqjTfCXQ8E,445
+ddtrace/appsec/_metrics.py,sha256=clpTcUMi8rYlwmCfa4c0d6qzEwYKTeTKI8i7T_Xeaek,4374
+ddtrace/appsec/_processor.py,sha256=PYnoaPb6lHGVIEkud5WZQ9HADJhI5F88jb6wP-Mg_RE,17693
+ddtrace/appsec/_python_info/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/appsec/_python_info/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/appsec/_python_info/stdlib/__init__.py,sha256=KMAkWzD1EKBveQs9pDbEQrbCLk-TVqTo2lSzYL2yWW0,634
+ddtrace/appsec/_python_info/stdlib/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/appsec/_python_info/stdlib/__pycache__/module_names_py310.cpython-311.pyc,,
+ddtrace/appsec/_python_info/stdlib/__pycache__/module_names_py311.cpython-311.pyc,,
+ddtrace/appsec/_python_info/stdlib/__pycache__/module_names_py36.cpython-311.pyc,,
+ddtrace/appsec/_python_info/stdlib/__pycache__/module_names_py37.cpython-311.pyc,,
+ddtrace/appsec/_python_info/stdlib/__pycache__/module_names_py38.cpython-311.pyc,,
+ddtrace/appsec/_python_info/stdlib/__pycache__/module_names_py39.cpython-311.pyc,,
+ddtrace/appsec/_python_info/stdlib/module_names_py310.py,sha256=ngWzLCBwy9ByT-cyVIjyBSLc4WDYCRYu1vvP5Ke9fRg,3176
+ddtrace/appsec/_python_info/stdlib/module_names_py311.py,sha256=2IiNW7Hgew3aaUMD9my6yRB9ufexn2C8wJXGXbP_RTk,3177
+ddtrace/appsec/_python_info/stdlib/module_names_py36.py,sha256=4LjKXzxt67jF6chR7if5s1UQYgoqYEkUlHbqeeBUldc,3209
+ddtrace/appsec/_python_info/stdlib/module_names_py37.py,sha256=szgt5Nxtx2yWlaeepsgROy1V7LT3nUHXFiM9da9bMmY,3233
+ddtrace/appsec/_python_info/stdlib/module_names_py38.py,sha256=3MCI6Um6QWjdS1Vorn4WQxyAbZaaGYf8eYVdpH720Ho,3218
+ddtrace/appsec/_python_info/stdlib/module_names_py39.py,sha256=jvuaSW0_7wtPnmIP6BdPSCwc1SKcw5EcBmnndAhnQ0A,3206
+ddtrace/appsec/_remoteconfiguration.py,sha256=4oFpe_6LloYsA2aRVGqLX818wPio4wfXhuDPQRH-fuQ,10647
+ddtrace/appsec/_trace_utils.py,sha256=UDUADGGe7_DFLbTMgyqU-BSPpJg9DZUv76UMKwFKeRk,12733
+ddtrace/appsec/_utils.py,sha256=Srs9dBgdSZubMfOBCCVuWQtSFPixADzt_MteL459GEs,5478
+ddtrace/appsec/iast/__init__.py,sha256=CPD2rJbZh290-1clJzq3J1E0d9dpNjvR3cWyL6nLuGc,72
+ddtrace/appsec/iast/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/appsec/rules.json,sha256=8KQsRDdq1ouTNZfTXt1LFdmXH45ZHAPUpjhRRTOShc4,278159
+ddtrace/appsec/trace_utils/__init__.py,sha256=oWT5uFME10wKUiQX0GDnWWAXOvmPOrPl7otH7DlUlhQ,666
+ddtrace/appsec/trace_utils/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/auto.py,sha256=PrVqH550rIK7CLn3O8eUpOM6ufWI8EaFVu45li8KR_k,601
+ddtrace/bootstrap/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/bootstrap/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/bootstrap/__pycache__/preload.cpython-311.pyc,,
+ddtrace/bootstrap/__pycache__/sitecustomize.cpython-311.pyc,,
+ddtrace/bootstrap/preload.py,sha256=8pkqcDru6SeC6Y4ZOa6k9wp-zS9dJzcoCD-roiOmuL0,4111
+ddtrace/bootstrap/sitecustomize.py,sha256=YSiZiOMB7VOPbmPO8F9AhZmljf8VmblNI-vERyHQMGg,6790
+ddtrace/commands/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/commands/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/commands/__pycache__/ddtrace_run.cpython-311.pyc,,
+ddtrace/commands/ddtrace_run.py,sha256=4snfPPvjISyj9238hkf1dic7MSoac7CIOIDtmYNEJK4,4585
+ddtrace/constants.py,sha256=PUmBvEkUlKfxKeuVqBrGDNbq21V0iB6hl9pyfmqLZaM,1708
+ddtrace/context.py,sha256=TAaybV1PrIr0MOUYvnsAoSg59C1gM44ahI1mD_eLhKk,9543
+ddtrace/contrib/__init__.py,sha256=qFyfkycuuYfhxPcJIHW0Q5hBRwsfUaabDanAKm7fVqU,244
+ddtrace/contrib/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/__pycache__/trace_utils.cpython-311.pyc,,
+ddtrace/contrib/__pycache__/trace_utils_async.cpython-311.pyc,,
+ddtrace/contrib/__pycache__/trace_utils_redis.cpython-311.pyc,,
+ddtrace/contrib/aiobotocore/__init__.py,sha256=9bV_yu3Eff-SRRAvPykek5LpkJtq7zV36lMfCBdXuJo,1023
+ddtrace/contrib/aiobotocore/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/aiobotocore/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/aiobotocore/patch.py,sha256=vjWEkkS5ql98QRi9CTB2QPoqZUPM9u1yy7--t3DosF0,6347
+ddtrace/contrib/aiohttp/__init__.py,sha256=UmZ0ex1doI_bvV2MXui2PagPEZhJancBv4k4nETUHfg,3061
+ddtrace/contrib/aiohttp/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/aiohttp/__pycache__/middlewares.cpython-311.pyc,,
+ddtrace/contrib/aiohttp/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/aiohttp/middlewares.py,sha256=RqDs9dWPJjMqyqqDT2eTP5YMi73lGcBwWvD-K1DgnUQ,6769
+ddtrace/contrib/aiohttp/patch.py,sha256=2uWWp6vFJYQ5gay5nM2u448dsTi6Hiyu9K8JCzny1eY,5107
+ddtrace/contrib/aiohttp_jinja2/__init__.py,sha256=ZvNEC7eSdy2UWiSJhCYoc8STU2OTgw6lHIJZMM1f0Rs,711
+ddtrace/contrib/aiohttp_jinja2/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/aiohttp_jinja2/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/aiohttp_jinja2/patch.py,sha256=rxsbERf6uyDmKu3JLZb7WPwzDcAO2-_8zWsRgi9rWq4,2023
+ddtrace/contrib/aiomysql/__init__.py,sha256=3zK56C7PWr5uYGl5oMSYrEwlKZSICy4Hl0A3ggZ__Os,1338
+ddtrace/contrib/aiomysql/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/aiomysql/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/aiomysql/patch.py,sha256=alWBpZ6ZBs4xy2i8MHzg7qOdjw98P6BSABTHgGbecs0,5849
+ddtrace/contrib/aiopg/__init__.py,sha256=Of3jYJ4jsNnNUDLNkstkRYWnSJ29hpOdj-GwagxZ8Hg,827
+ddtrace/contrib/aiopg/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/aiopg/__pycache__/connection.cpython-311.pyc,,
+ddtrace/contrib/aiopg/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/aiopg/connection.py,sha256=90sl-eDM5QpbIIuM0SkmXDTKHnFRyU0D6E2L8Kr_-Do,4393
+ddtrace/contrib/aiopg/patch.py,sha256=2P8XOpJXmQsc0nhAAzjue_ddAF0x13_L0mDq_JjExDo,1887
+ddtrace/contrib/aioredis/__init__.py,sha256=C5EeJsQPTJHIILnPqRU-q28nHZZaByptetMmHcG6Fys,2222
+ddtrace/contrib/aioredis/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/aioredis/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/aioredis/patch.py,sha256=tvDMaHqJglMm2_9N4tQxNOYgHCzSe8zyWWeMAYDmOxI,8603
+ddtrace/contrib/algoliasearch/__init__.py,sha256=Hw78TlIYgBQEA_YyfYMMwIrorhSLIcW1A0iE5bA9rOo,974
+ddtrace/contrib/algoliasearch/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/algoliasearch/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/algoliasearch/patch.py,sha256=NoZcMQP-QNrf5oaGgafwNlINaptyvI9GmCMuLQKqJso,5696
+ddtrace/contrib/aredis/__init__.py,sha256=AF8GAoHfbKiChvZtDd8A6yKjLV39CvEbVK_yhWCX92o,2030
+ddtrace/contrib/aredis/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/aredis/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/aredis/patch.py,sha256=D2mggX9PFLqcwnZq4s7g4MxLLZcAWN3xPb2zXhy0Frc,2783
+ddtrace/contrib/asgi/__init__.py,sha256=Wrd3MJD8DOzh3ntszQ7BckhPedB8EAjHTf9CuxUFt-c,2145
+ddtrace/contrib/asgi/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/asgi/__pycache__/middleware.cpython-311.pyc,,
+ddtrace/contrib/asgi/__pycache__/utils.cpython-311.pyc,,
+ddtrace/contrib/asgi/middleware.py,sha256=p0NMvCMzbMgten6AC5Q3mVHYvAaJAiUQLCjMAAH7wCc,11437
+ddtrace/contrib/asgi/utils.py,sha256=A0YA0ZMhryqiM4jXkeWenF320SCJmXIHy4GQzl-fhXQ,3287
+ddtrace/contrib/asyncio/__init__.py,sha256=rPEAg2IyIY8Ar2O3s1p9X-n3KA-Prx1bbK_LKRgsWs4,2553
+ddtrace/contrib/asyncio/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/asyncio/__pycache__/compat.cpython-311.pyc,,
+ddtrace/contrib/asyncio/__pycache__/helpers.cpython-311.pyc,,
+ddtrace/contrib/asyncio/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/asyncio/__pycache__/provider.cpython-311.pyc,,
+ddtrace/contrib/asyncio/__pycache__/wrappers.cpython-311.pyc,,
+ddtrace/contrib/asyncio/compat.py,sha256=HgAfoCMH2TTyooC4siI5pH2472-mFzC8-yrAExm0m7U,278
+ddtrace/contrib/asyncio/helpers.py,sha256=G1L7FBnpFVBkn5m9dEFHdXLG1ECY2LANpJAN2H3_nEU,3102
+ddtrace/contrib/asyncio/patch.py,sha256=QcFgpIKsiNB5kRO2mSXTT_bdTP44zhzKF-Js_3GPHYo,463
+ddtrace/contrib/asyncio/provider.py,sha256=CIZMUeR3Glm-WrO7PQH0OUMUSUXjI-YMMv73zueWFpA,2855
+ddtrace/contrib/asyncio/wrappers.py,sha256=svG3-7JQ9kM1pFcKYCji-LRGGqe5ZDUTFx4h3qkDpsM,953
+ddtrace/contrib/asyncpg/__init__.py,sha256=cGQezxZJ2UYMlK1ypBzQ3WuVksl9l1UQbBIc1RngNJM,1322
+ddtrace/contrib/asyncpg/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/asyncpg/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/asyncpg/patch.py,sha256=JIkh5ZdxjWjoDjYypfWNIuvPhOQbDX5zslLjil8AYVo,4664
+ddtrace/contrib/aws_lambda/__init__.py,sha256=h0Wsf5SvBEy9bJVhVSNBPICTFBhtgHsGnzJUU8m8TQI,1434
+ddtrace/contrib/aws_lambda/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/aws_lambda/__pycache__/_cold_start.cpython-311.pyc,,
+ddtrace/contrib/aws_lambda/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/aws_lambda/_cold_start.py,sha256=nOvLzOrLto40M_JlV2NY_7qfI-HbhcaMiMwDr1MHTXI,482
+ddtrace/contrib/aws_lambda/patch.py,sha256=EitsmJ9HAlA2e414488WIpNJ0xtEDcCHl5mD8KJ4bzY,9218
+ddtrace/contrib/boto/__init__.py,sha256=IfPRmsVDN_OiiLPZAQg3oXgTrRGC9Jw1E2BcYNtU3vs,1019
+ddtrace/contrib/boto/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/boto/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/boto/patch.py,sha256=rAWuYvUvCpVk67pnM68uA6KhpVL7yQdksYmCH91KEO8,6881
+ddtrace/contrib/botocore/__init__.py,sha256=f-8_Ra8c1IUM3mK63D300YYUjeZY3dvQfovBYkjOeFM,3870
+ddtrace/contrib/botocore/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/botocore/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/botocore/__pycache__/utils.cpython-311.pyc,,
+ddtrace/contrib/botocore/patch.py,sha256=w8cSKtZLuZxEtNDRmyDCmKBhMLGumGjWBN7L5FWulE4,11389
+ddtrace/contrib/botocore/services/__pycache__/bedrock.cpython-311.pyc,,
+ddtrace/contrib/botocore/services/__pycache__/kinesis.cpython-311.pyc,,
+ddtrace/contrib/botocore/services/__pycache__/sqs.cpython-311.pyc,,
+ddtrace/contrib/botocore/services/__pycache__/stepfunctions.cpython-311.pyc,,
+ddtrace/contrib/botocore/services/bedrock.py,sha256=w7xujld-YvXFXYNH-gWmJAec5-ar397OvDj7e-BPOhw,15713
+ddtrace/contrib/botocore/services/kinesis.py,sha256=879NrScMaMaZ90q3FQNYQVnkmL-T5PX_qlIfHw6xXek,7430
+ddtrace/contrib/botocore/services/sqs.py,sha256=2pN--HSpNZl6eB_W1hINRBaetKe1YMx1V0vkMlnoY4Q,10521
+ddtrace/contrib/botocore/services/stepfunctions.py,sha256=NOjLyVlJqb0qKEh0ze1KsOMLHI1Ef63SBMc2zaGx8HI,4122
+ddtrace/contrib/botocore/utils.py,sha256=iUUVH1O_UWBrlK-7pUz4EQM61KYb1FhnQgac7ghEcTo,9177
+ddtrace/contrib/bottle/__init__.py,sha256=wy6alj22Uo4lNzk8BQUEnA__lGx7P7Beqxp2yiAF-Ug,1140
+ddtrace/contrib/bottle/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/bottle/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/bottle/__pycache__/trace.cpython-311.pyc,,
+ddtrace/contrib/bottle/patch.py,sha256=9OzWL40cv559g1gAyN0RctZH_yiRteJUqhvYYjd4y7E,861
+ddtrace/contrib/bottle/trace.py,sha256=wXEhP6piR2oEemPFXsRE4bOR48Pz6V3JZoyc7Yblh5M,4246
+ddtrace/contrib/cassandra/__init__.py,sha256=LeXodkO4NBuOndLu-iZ0oPT4XZsG71-keSw7w5wux0A,1190
+ddtrace/contrib/cassandra/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/cassandra/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/cassandra/__pycache__/session.cpython-311.pyc,,
+ddtrace/contrib/cassandra/patch.py,sha256=gTEULNQXWMvEH4VQHcvBthVH3XGWytnHLMCum2XcjeQ,89
+ddtrace/contrib/cassandra/session.py,sha256=KkpujlPSpcev1pvdaU6iVQR4e-qheqqL3ve5eDiCI0M,10272
+ddtrace/contrib/celery/__init__.py,sha256=Kgp_FgcEiQF4VOVE0TqIRo5OqwssjvNePtF34ujRz6Q,1583
+ddtrace/contrib/celery/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/celery/__pycache__/app.cpython-311.pyc,,
+ddtrace/contrib/celery/__pycache__/constants.cpython-311.pyc,,
+ddtrace/contrib/celery/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/celery/__pycache__/signals.cpython-311.pyc,,
+ddtrace/contrib/celery/__pycache__/utils.cpython-311.pyc,,
+ddtrace/contrib/celery/app.py,sha256=t4tRJ3o2ZN8s-q3_Fyp9e3W95ugxL65WfTTcj_ARmfk,3340
+ddtrace/contrib/celery/constants.py,sha256=2ULjjEu1VmKSb26zHXWcWhScx3uGP_nc7SoxRdo_jkk,470
+ddtrace/contrib/celery/patch.py,sha256=5Jc37i_k0w1juOz6TFyVnyAty9n8hEhQ1zeZTsUBSIQ,1138
+ddtrace/contrib/celery/signals.py,sha256=YyM7Oex2_1aDfx1sIahMh7a4sC3umEDaSnvKaNp8QoI,8211
+ddtrace/contrib/celery/utils.py,sha256=ICRnxclfoa-OqWypL-YivweF02MvGvHGX9csKnAJ_GU,4545
+ddtrace/contrib/cherrypy/__init__.py,sha256=GU1w6levb2eE_O4_L8F5KXSNeKSYrD_tzsVS1sXJEXQ,1642
+ddtrace/contrib/cherrypy/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/cherrypy/__pycache__/middleware.cpython-311.pyc,,
+ddtrace/contrib/cherrypy/middleware.py,sha256=5P_rk9yVIZ_8ccUtXAfsCK4BTHTHrPfaiALM0ZTj4lA,5743
+ddtrace/contrib/consul/__init__.py,sha256=0i1qNV6XNYcA1Se0_X0h5y3e60lPzorE_-d0cDQexlE,907
+ddtrace/contrib/consul/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/consul/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/consul/patch.py,sha256=Y3gY9ku8OBveB4o2O2y14BhZnHBUYrPgnA_thM6Jdtw,2685
+ddtrace/contrib/coverage/__init__.py,sha256=94_a-BjnFq_5-89Q293HWZirZsklo-dnmmXAIItW9a4,914
+ddtrace/contrib/coverage/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/coverage/__pycache__/constants.cpython-311.pyc,,
+ddtrace/contrib/coverage/__pycache__/data.cpython-311.pyc,,
+ddtrace/contrib/coverage/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/coverage/__pycache__/utils.cpython-311.pyc,,
+ddtrace/contrib/coverage/constants.py,sha256=6llF4KSvJb8GfOYvQUdRlMjZN23Y8n3YNcECnkWcpjM,33
+ddtrace/contrib/coverage/data.py,sha256=l-JnkQjfIw_xakUxD6xeohdndOPXLf-7WWuTsX90wCw,100
+ddtrace/contrib/coverage/patch.py,sha256=3EIgmvgmpa-jP87J7XtNK9NjCXr5sWlVIBPHEqD4-fo,1604
+ddtrace/contrib/coverage/utils.py,sha256=g_O_N-Fef9859SpUxS64voJh01Pad48t2iD1WeQjWOg,664
+ddtrace/contrib/dbapi/__init__.py,sha256=fcSUo-w_COulEPTpZ0O2B1uEo1uV8YW3C8OOWvFipAY,14002
+ddtrace/contrib/dbapi/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/dbapi_async/__init__.py,sha256=igsTeNZG69SzZe09FvjnVxwYG5K5PFfDTECxF6noF1c,11190
+ddtrace/contrib/dbapi_async/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/django/__init__.py,sha256=JWv5O5uYYbZlagVDmEBhyvozo5Jm9EMpxywp45_jk7o,5488
+ddtrace/contrib/django/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/django/__pycache__/_asgi.cpython-311.pyc,,
+ddtrace/contrib/django/__pycache__/compat.cpython-311.pyc,,
+ddtrace/contrib/django/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/django/__pycache__/restframework.cpython-311.pyc,,
+ddtrace/contrib/django/__pycache__/utils.cpython-311.pyc,,
+ddtrace/contrib/django/_asgi.py,sha256=qzP0RaYXUaEhMa0QfF2hU7hdMt6XSQU76ZzozeZJLKE,1390
+ddtrace/contrib/django/compat.py,sha256=AAjJn7NXuQMNFsyrcSc_B7B1NyugPTB-wEUSvz3knOY,908
+ddtrace/contrib/django/patch.py,sha256=TTJSayW6SpRGgQBgFYSfRsOfEIWYZTRtL3HipiYNPoY,34624
+ddtrace/contrib/django/restframework.py,sha256=pZ0I_FEwZA2L0yg3aUAw1yWRUXpQi-ccCRy3PPxlFvs,1155
+ddtrace/contrib/django/utils.py,sha256=K61VSWZA6ooTxFS0NGE9UsbnVWFoCgMN05NzUsbEN1M,16558
+ddtrace/contrib/dogpile_cache/__init__.py,sha256=rnl4KhUBRUcZC7FNnu1g9gcJZu78FOgNsZfuN2di_CU,1653
+ddtrace/contrib/dogpile_cache/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/dogpile_cache/__pycache__/lock.cpython-311.pyc,,
+ddtrace/contrib/dogpile_cache/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/dogpile_cache/__pycache__/region.cpython-311.pyc,,
+ddtrace/contrib/dogpile_cache/lock.py,sha256=zmEG-OVH_veGeupY4UdxAZ6kERgjmpHZLjUMupFT6vc,1636
+ddtrace/contrib/dogpile_cache/patch.py,sha256=e8wiG1obluHLLPObyb9l9t_TajaRy7FnhqGeksv_mHk,1875
+ddtrace/contrib/dogpile_cache/region.py,sha256=DGO_uSOTEM3lAyug5bTFo1Pf0_NgNPliRW9vhkrwUWw,1990
+ddtrace/contrib/elasticsearch/__init__.py,sha256=dVviFwaMSxlUctu7yARm2yezD2W4bzirnLu2U551lds,1526
+ddtrace/contrib/elasticsearch/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/elasticsearch/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/elasticsearch/__pycache__/quantize.cpython-311.pyc,,
+ddtrace/contrib/elasticsearch/patch.py,sha256=D2UuaxvwsTahHGUphybiB8urrGUjRMUf-qpoIvrlvfE,8926
+ddtrace/contrib/elasticsearch/quantize.py,sha256=wdiGa9N2CcPXw614lwb6-kCTBrxvkubtmb4KI2S9H5c,1052
+ddtrace/contrib/falcon/__init__.py,sha256=XbjcXYfnw7lLbgJh9u-2ylkut7UtQ2O29z6v-TzYCBs,1512
+ddtrace/contrib/falcon/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/falcon/__pycache__/middleware.cpython-311.pyc,,
+ddtrace/contrib/falcon/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/falcon/middleware.py,sha256=qIOvgkBJLsYaJMW60NQsrokNL91UFDe4lx1zamkU6dA,4444
+ddtrace/contrib/falcon/patch.py,sha256=tM-0kfBFRW4F0DlkD46Lz_KGqK6wD0jyh_61ZIrvLjw,1217
+ddtrace/contrib/fastapi/__init__.py,sha256=7j0YDwboKR_ij9DP-qjB99mpUC9lkCUf3Yyyf1SDtac,1978
+ddtrace/contrib/fastapi/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/fastapi/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/fastapi/patch.py,sha256=uaii_6XeyBizbCT7VFN8lMBi2ixSBFsqsx7Ls_HFr9w,3401
+ddtrace/contrib/flask/__init__.py,sha256=3w7QicHrmgLmQD7GkCXX56a_-UnBquxDzW3TiweLvE0,2864
+ddtrace/contrib/flask/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/flask/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/flask/__pycache__/wrappers.cpython-311.pyc,,
+ddtrace/contrib/flask/patch.py,sha256=n0Sxkz5t4T1g82lfafUWwWXdKM1V9wyeUaHs3aeTEcw,19694
+ddtrace/contrib/flask/wrappers.py,sha256=6jphZzUVDvfJuc_adDTZ7o3ih_y7u1A7LK2BeoIgg8M,3187
+ddtrace/contrib/flask_cache/__init__.py,sha256=MfOIBgrD0uPsa0b2lfoG-G3Mnzti5S8PY51P0V1aW2g,1702
+ddtrace/contrib/flask_cache/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/flask_cache/__pycache__/tracers.cpython-311.pyc,,
+ddtrace/contrib/flask_cache/__pycache__/utils.cpython-311.pyc,,
+ddtrace/contrib/flask_cache/tracers.py,sha256=NXbnigTR_MXnVpzvDvaST0Kx7HzgLZELvQBTvNDFDyQ,6752
+ddtrace/contrib/flask_cache/utils.py,sha256=V6nc8kZKp_dhQFyco_gx3ez0FCZYfF6bcYLf_hgw5eE,2186
+ddtrace/contrib/flask_login/__init__.py,sha256=29ErfIxFnpOOKktBl-Ag9keQWFTlZTT-tPivqncowBc,1919
+ddtrace/contrib/flask_login/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/flask_login/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/flask_login/patch.py,sha256=Gfp2jwW-4gBvd38Qs_Sb2vdW1RRS6cN-ZfwgUj6Txwk,3626
+ddtrace/contrib/futures/__init__.py,sha256=RBln83ltDdVksGsSFM8rsDv9m9RsECNAQify_Rrkf3s,979
+ddtrace/contrib/futures/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/futures/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/futures/__pycache__/threading.cpython-311.pyc,,
+ddtrace/contrib/futures/patch.py,sha256=_CgJp1Zf5pxHzqdQuYaxfDc1DpEvm1GZz9jQS-VMPhI,1106
+ddtrace/contrib/futures/threading.py,sha256=KsSEBnoKn6Qy_2T8AGN8JneN7RBoQrmWGWxN6Cep9iE,1822
+ddtrace/contrib/gevent/__init__.py,sha256=lwZFVcW4MMkAu6YiD8hOBj1qqxSLwVUy8ni6ENaRpJ0,1962
+ddtrace/contrib/gevent/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/gevent/__pycache__/greenlet.cpython-311.pyc,,
+ddtrace/contrib/gevent/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/gevent/__pycache__/provider.cpython-311.pyc,,
+ddtrace/contrib/gevent/greenlet.py,sha256=rEzvw7pibHIXNe8tOK4Jm9J7Cb4pK6_nt-me8p6lNVE,2241
+ddtrace/contrib/gevent/patch.py,sha256=XiU7qr-im4NYZmwTUAPL4221Ii7-Fc-I6XN_zQDDp8c,2538
+ddtrace/contrib/gevent/provider.py,sha256=wWh_EnT4xdpekgdJhiqGWnTj2CXyUfLlYJAVYX2WrjA,1468
+ddtrace/contrib/graphql/__init__.py,sha256=oCrbJUcgUTbC-Evfg4Xb5d8RtHQEBs_xlt1v38JY3-8,1482
+ddtrace/contrib/graphql/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/graphql/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/graphql/patch.py,sha256=DIJXmXJUJPJRlQ1x7DZhqktcNcbSVKy9sprc3aB91UA,11437
+ddtrace/contrib/grpc/__init__.py,sha256=g5ckIPN9aK0aTIzWi624l9t-5mtxKLKyENjALrTm_iQ,2248
+ddtrace/contrib/grpc/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/grpc/__pycache__/aio_client_interceptor.cpython-311.pyc,,
+ddtrace/contrib/grpc/__pycache__/aio_server_interceptor.cpython-311.pyc,,
+ddtrace/contrib/grpc/__pycache__/client_interceptor.cpython-311.pyc,,
+ddtrace/contrib/grpc/__pycache__/constants.cpython-311.pyc,,
+ddtrace/contrib/grpc/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/grpc/__pycache__/server_interceptor.cpython-311.pyc,,
+ddtrace/contrib/grpc/__pycache__/utils.cpython-311.pyc,,
+ddtrace/contrib/grpc/aio_client_interceptor.py,sha256=JuAd5dzJMYDqj3Y6kbZVvZLj5YU4WofAA0GvrtK_K1w,9950
+ddtrace/contrib/grpc/aio_server_interceptor.py,sha256=QxFDDHUpplTcDMyAuCMsHXWKe4uNzcK2ILWU1TMZgXw,12388
+ddtrace/contrib/grpc/client_interceptor.py,sha256=paMEhofuD2CPb8yQH2ZgYK5JKbH92jIQ7E1if6VvW4k,10747
+ddtrace/contrib/grpc/constants.py,sha256=g4pTnEmFqAabsQBLDCvKoB5lKZCtZc-ah2qs6VEVOPw,1016
+ddtrace/contrib/grpc/patch.py,sha256=HqfgTPXP2pBiUpnCn2UtYxL57nZy5rJwWlbppnMUUJk,7526
+ddtrace/contrib/grpc/server_interceptor.py,sha256=NWQmtbk8NjZ1qYWJEqrCpb5UokaX4ZV8uhPTkxGw15I,5161
+ddtrace/contrib/grpc/utils.py,sha256=FAfO5UyMg2nkLZ_NxVzR3_z8v3Z6tqIytNsn6L6tsdQ,2970
+ddtrace/contrib/gunicorn/__init__.py,sha256=tEinb0tU3N8cW111sI9gRlL5dNpL_hkhWwd8lzX5s6A,421
+ddtrace/contrib/gunicorn/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/httplib/__init__.py,sha256=wol5d0NIeR9jhdqC0CXkMx42wkAh989LypeaUOHPKmU,1604
+ddtrace/contrib/httplib/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/httplib/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/httplib/patch.py,sha256=m-ooHqTSl1aL5RUZpVO8n7O3RhEqRdwndzIILROZAto,8130
+ddtrace/contrib/httpx/__init__.py,sha256=sDbsKCk_Yv_ZsJaKWU7nYDh5uCW2tVCZkOUlyjihrfU,2556
+ddtrace/contrib/httpx/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/httpx/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/httpx/patch.py,sha256=-cLHcKUWnpLy9hxRnc94yxtiTLpBjwWquvgupMT6n_w,6951
+ddtrace/contrib/jinja2/__init__.py,sha256=sbeozQZfnw1pyXNKhwefKO-X8Le1cI6fEawccPuLyDo,1273
+ddtrace/contrib/jinja2/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/jinja2/__pycache__/constants.cpython-311.pyc,,
+ddtrace/contrib/jinja2/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/jinja2/constants.py,sha256=BpRqO-bQIWzrHFO7DtJAZ4v7lH7HpYWjYri7yCOG-Ts,35
+ddtrace/contrib/jinja2/patch.py,sha256=bFKs5VUF1LICVwCJkSJ6EOkFi3pZNSUJyA8CCf3EAnY,3483
+ddtrace/contrib/kafka/__init__.py,sha256=d2GeIQqFVrPhkO_JAKvgPx73iW2RjqOAzAggyydnb4s,1375
+ddtrace/contrib/kafka/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/kafka/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/kafka/patch.py,sha256=m2BTr7FrWEAU_LbkXafddxsCZiKqEhw-HL3Z_affW9k,11518
+ddtrace/contrib/kombu/__init__.py,sha256=SWh0sC8gHZ71qvDd5j2TiOZQ1h2ECnSCONntoSOhgbo,1671
+ddtrace/contrib/kombu/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/kombu/__pycache__/constants.cpython-311.pyc,,
+ddtrace/contrib/kombu/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/kombu/__pycache__/utils.cpython-311.pyc,,
+ddtrace/contrib/kombu/constants.py,sha256=RhhLOyKsbyFLnko4wvuyDQvBv_wy0CMN27k4akMSlak,26
+ddtrace/contrib/kombu/patch.py,sha256=QEz5P8WCVjVo0SjV1e9cLOFq2kpoPQ1ZPN5X3nublnU,5818
+ddtrace/contrib/kombu/utils.py,sha256=xjAzg2zr7o74Iy7Bc2vIQvfvm_W82ZzvbiBHLNtCa7E,1101
+ddtrace/contrib/langchain/__init__.py,sha256=iy5GzZMp1ApEevdyPrtQBqZ9Z81RWmlLCBlZD7FE8Zk,6459
+ddtrace/contrib/langchain/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/langchain/__pycache__/constants.cpython-311.pyc,,
+ddtrace/contrib/langchain/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/langchain/constants.py,sha256=hgeiUEiR5fjYj7w2uxO1osAesux0172juwSES-gE2uY,2478
+ddtrace/contrib/langchain/patch.py,sha256=zAhRvOnX0vXC0hgr-lToG8sti4NF92Z6VeYwOmtdk2I,36845
+ddtrace/contrib/logbook/__init__.py,sha256=lm-ge5wJYMIzmW7Vm_uu0IgElav1tgiSGruvCCvKZRo,2578
+ddtrace/contrib/logbook/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/logbook/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/logbook/patch.py,sha256=Np-C41AcLIDyDBgbqlSgX75vb2cz3k6jUnxCP0Ms5sI,2327
+ddtrace/contrib/logging/__init__.py,sha256=Q1NLFHbcjRFBLg05AQ96Qm-vsA-o3cPYTAOYS3b-G7I,2145
+ddtrace/contrib/logging/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/logging/__pycache__/constants.cpython-311.pyc,,
+ddtrace/contrib/logging/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/logging/constants.py,sha256=kOOXgHFz3wb4Im9UuARht1TPZaOpVA_QvzM0j0bHJs4,227
+ddtrace/contrib/logging/patch.py,sha256=zGfI465HqorDyuV5tfBna-sCyHavpHRzaCnaoOyPW9k,5077
+ddtrace/contrib/loguru/__init__.py,sha256=gs8t4FTGLbWDfsafRbWkBxNQsCPKc1G9b0X6B94xgsU,2671
+ddtrace/contrib/loguru/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/loguru/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/loguru/patch.py,sha256=2OaCdYyumFeRlUjhN7pFfLDbBXdhjeFeR8A40nJXRww,2835
+ddtrace/contrib/mako/__init__.py,sha256=hXN21PGRMpV9RkGA3-ws5j_YaXXrBaKJp2Kcwze9LpQ,616
+ddtrace/contrib/mako/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/mako/__pycache__/constants.cpython-311.pyc,,
+ddtrace/contrib/mako/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/mako/constants.py,sha256=BpRqO-bQIWzrHFO7DtJAZ4v7lH7HpYWjYri7yCOG-Ts,35
+ddtrace/contrib/mako/patch.py,sha256=rUg6jdkaDxLobRFyHnFNfeziSP2coaojPd4QfMgrSIM,2298
+ddtrace/contrib/mariadb/__init__.py,sha256=yV-9X6B-EnmFd8CbqcfZWu6s-WPML5vtJJ9mE4YTcOw,1716
+ddtrace/contrib/mariadb/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/mariadb/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/mariadb/patch.py,sha256=tQnNB0Zo9xhxmPV6XFmRRJ12eoKO3gq8TwJpCKOVY3U,1501
+ddtrace/contrib/molten/__init__.py,sha256=QsJLSom7Lphx2S4uofnr7XN_7Ge8SGPljCO312AqqGo,1271
+ddtrace/contrib/molten/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/molten/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/molten/__pycache__/wrappers.cpython-311.pyc,,
+ddtrace/contrib/molten/patch.py,sha256=_x1b6d6kG9FOePSj-CuHTZ9iE9NV947Ut_WkEVluQCE,5838
+ddtrace/contrib/molten/wrappers.py,sha256=xpd3ygO-4rnuQWhtXEdJYq1IJa5V6qv0zPK10xCafmQ,4018
+ddtrace/contrib/mongoengine/__init__.py,sha256=1Buh90LANDKZiIREQFKm__Ai5LBx92XSk72nkkPrgnI,897
+ddtrace/contrib/mongoengine/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/mongoengine/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/mongoengine/__pycache__/trace.cpython-311.pyc,,
+ddtrace/contrib/mongoengine/patch.py,sha256=Zg18ToqJFeGIASS52ZbF7N4nMDXc2G4GRf2Q89YSCnk,327
+ddtrace/contrib/mongoengine/trace.py,sha256=pweTTRGYAKhUoyIqyZEJjobCc2CmSwDfj5T4NZvikzk,1269
+ddtrace/contrib/mysql/__init__.py,sha256=4LqJc0VntMxCQzxWJBSy7WJYUq5U9ihZRe6fx25olWc,2070
+ddtrace/contrib/mysql/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/mysql/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/mysql/patch.py,sha256=9O1RUsLCN-KTeOgGm5lJfVZvh_nKehb5iU-yOMoiLic,1895
+ddtrace/contrib/mysqldb/__init__.py,sha256=-PtsHQA_acRQSd1msdEEh09RKpB5U-Zf0Zd4qXaMM-4,2418
+ddtrace/contrib/mysqldb/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/mysqldb/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/mysqldb/patch.py,sha256=_p8AaBY4tbcPekQEp1rsSFgbP0OswmzMLynzeMQzz9E,3286
+ddtrace/contrib/openai/__init__.py,sha256=gqjPSjrQfizPnu2MHoSzpgAA1WPTth8XBpTAlx6Qe4I,8349
+ddtrace/contrib/openai/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/openai/__pycache__/_endpoint_hooks.cpython-311.pyc,,
+ddtrace/contrib/openai/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/openai/__pycache__/utils.cpython-311.pyc,,
+ddtrace/contrib/openai/_endpoint_hooks.py,sha256=Nd45jIy0DYhr4DxJBcO929eDeAERuI0VxHznjE9hLJ8,38541
+ddtrace/contrib/openai/patch.py,sha256=gOQ8a3uyjPdqCH-z3nUS43lModo60gzUrmRhn0wMyuU,15797
+ddtrace/contrib/openai/utils.py,sha256=Sc37PAWkqwwlBzqCvCTTGWF4wRwTjIUf6FGN79iegWs,4308
+ddtrace/contrib/psycopg/__init__.py,sha256=TpbJmEjFAg2f44h6tJNNRHqG5NtTk6jVP3wv-HzVZvU,1646
+ddtrace/contrib/psycopg/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/psycopg/__pycache__/async_connection.cpython-311.pyc,,
+ddtrace/contrib/psycopg/__pycache__/async_cursor.cpython-311.pyc,,
+ddtrace/contrib/psycopg/__pycache__/connection.cpython-311.pyc,,
+ddtrace/contrib/psycopg/__pycache__/cursor.cpython-311.pyc,,
+ddtrace/contrib/psycopg/__pycache__/extensions.cpython-311.pyc,,
+ddtrace/contrib/psycopg/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/psycopg/async_connection.py,sha256=C5J8oNkn8qDnxcofPHC3DsX_pt_f0tX9R_s_m0kF8Qw,2784
+ddtrace/contrib/psycopg/async_cursor.py,sha256=5_b6hgUMogq_k0Pw9UqyWY8A0qh4pny4ntuBLb7gXrg,1147
+ddtrace/contrib/psycopg/connection.py,sha256=hWCZ5FA8qOrpv9S1J8iwIwqUnQMtINeyOR50h0JUyi0,4271
+ddtrace/contrib/psycopg/cursor.py,sha256=8yQREYRPy97Z4wTsj1hhP8d0eAMXONss3SDmpGE4Nb0,1041
+ddtrace/contrib/psycopg/extensions.py,sha256=6jnUb4otunC0A57CmdO48XutTaCW7YfQxlqP8XDNR0Y,6777
+ddtrace/contrib/psycopg/patch.py,sha256=oPHIT22RhYAz6jzNOjazgFoHOfMF96sDMLGKFSRqvOs,8132
+ddtrace/contrib/pylibmc/__init__.py,sha256=peMKPM5-dPINxamo8OM0AaXtRMNRcD1HUjZVsz2Mo0Q,1017
+ddtrace/contrib/pylibmc/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/pylibmc/__pycache__/addrs.cpython-311.pyc,,
+ddtrace/contrib/pylibmc/__pycache__/client.cpython-311.pyc,,
+ddtrace/contrib/pylibmc/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/pylibmc/addrs.py,sha256=S1wHEMy_T_zZWom_p6_u9eyC1W39nn_sXE9gC9nsTGg,365
+ddtrace/contrib/pylibmc/client.py,sha256=aNZuTtUMYfMYwE3EqHDIl4RzOXBPM9_oQ0wXogZb76E,7087
+ddtrace/contrib/pylibmc/patch.py,sha256=ui1_WRVbZ8Pyt3i5R8hugd4mmnV7PwDfgWDNYEQn3Lg,285
+ddtrace/contrib/pymemcache/__init__.py,sha256=TJltXVZEH9LxVdztLjJkRclsqv3ciFcAFy2oDtimfSg,1433
+ddtrace/contrib/pymemcache/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/pymemcache/__pycache__/client.cpython-311.pyc,,
+ddtrace/contrib/pymemcache/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/pymemcache/client.py,sha256=4rtTVAoReUOZPqdf200BxrDn9__GvhGDhmroku1ogiU,12267
+ddtrace/contrib/pymemcache/patch.py,sha256=zX8yxueKVhU5m-LuZ9rtb7NtFf67evCu8VNf2GY9eNc,1528
+ddtrace/contrib/pymongo/__init__.py,sha256=2efOGg1MJ1JvlBZMqa0wBa0plAZn3SlmYNCpYVL9jaE,1497
+ddtrace/contrib/pymongo/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/pymongo/__pycache__/client.cpython-311.pyc,,
+ddtrace/contrib/pymongo/__pycache__/parse.cpython-311.pyc,,
+ddtrace/contrib/pymongo/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/pymongo/client.py,sha256=Lks6ebPx9frA7I8UPiLBUyTBgNDug4xUuLgRNOQNz8Y,13064
+ddtrace/contrib/pymongo/parse.py,sha256=kJccHF7TKlWnfrN75duPAkRQeKph75h3LyzON0y_cBs,6292
+ddtrace/contrib/pymongo/patch.py,sha256=r7_VAeSxRcExucmo3IxRrn9SFBNmyFoPTAfKKytHLu4,2611
+ddtrace/contrib/pymysql/__init__.py,sha256=-aqmsgZRHaYRNMzUeCKZntkuVWxdmxIRwXm_S_zdR5Y,1674
+ddtrace/contrib/pymysql/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/pymysql/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/pymysql/patch.py,sha256=dszSvQTkBVdX726O4eC4xOLCxPeqMqbcpsPstWZEpOg,1552
+ddtrace/contrib/pynamodb/__init__.py,sha256=vz_iCDyV1Q8n2hltDBwY1D4X99CJVGhRxfvBd4Mcruo,1019
+ddtrace/contrib/pynamodb/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/pynamodb/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/pynamodb/patch.py,sha256=B9Xn4Z5H013kUD2zAHuylodiVMYkWjNXlGYlZJbpW_g,3395
+ddtrace/contrib/pyodbc/__init__.py,sha256=LwxVnw4ji9JZyqY0i_KPUdQDm34BL6FWqKrMcLn3Vtc,1604
+ddtrace/contrib/pyodbc/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/pyodbc/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/pyodbc/patch.py,sha256=DpRKnDFDx6RQkXjRlkRyvFcKTjmid2FrxPrCrY5tsc4,1689
+ddtrace/contrib/pyramid/__init__.py,sha256=H30eVR2iOoWisvpFupS7VTMXiOqEyC5eQ3O3LakUhnI,1710
+ddtrace/contrib/pyramid/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/pyramid/__pycache__/constants.cpython-311.pyc,,
+ddtrace/contrib/pyramid/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/pyramid/__pycache__/trace.cpython-311.pyc,,
+ddtrace/contrib/pyramid/constants.py,sha256=69TDS3jZ0kf6M7WlNT60Nv4qOWIiGEcbsCeG8_Qp9vw,310
+ddtrace/contrib/pyramid/patch.py,sha256=oqYCKckucQOeVkpLMo0lKa6gW6EuMewMraD_Ls2NECU,3773
+ddtrace/contrib/pyramid/trace.py,sha256=lAhsmH5Mp7XyaJq9scjNWt9fAwP61pV8WzAkS9pvp2I,5774
+ddtrace/contrib/pytest/__init__.py,sha256=zVzjTq1hiBAEQc-WBmsKFe8yfiITAmr5sfd84hDH9DU,2123
+ddtrace/contrib/pytest/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/pytest/__pycache__/constants.cpython-311.pyc,,
+ddtrace/contrib/pytest/__pycache__/newhooks.cpython-311.pyc,,
+ddtrace/contrib/pytest/__pycache__/plugin.cpython-311.pyc,,
+ddtrace/contrib/pytest/constants.py,sha256=pqR9gHBrmyU5CyU7My2zeB9KR5BPOQOQv87KJA0FGEw,294
+ddtrace/contrib/pytest/newhooks.py,sha256=kVTKsDqYKrw_VM7MffQAJYKiuHnqUbPa7Qwrk8Wq8pE,1021
+ddtrace/contrib/pytest/plugin.py,sha256=VfR_r106w9p_Tsfkp8AxENRQrHDuQjl6bmNlnbVVHcc,38415
+ddtrace/contrib/pytest_bdd/__init__.py,sha256=4z2Vsify_2RmMeBl-tEBVhxDvOiYm9egr6mh_YKJMM4,1088
+ddtrace/contrib/pytest_bdd/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/pytest_bdd/__pycache__/constants.cpython-311.pyc,,
+ddtrace/contrib/pytest_bdd/__pycache__/plugin.cpython-311.pyc,,
+ddtrace/contrib/pytest_bdd/constants.py,sha256=hJUsSNAs87r1DIS41T39abbDG3pJpTNC52e2ZPtkvOk,55
+ddtrace/contrib/pytest_bdd/plugin.py,sha256=1tC3dk5ThGqLJk7JhVsvHgrL7yl_CZyWDG5bo0x9wBE,5191
+ddtrace/contrib/pytest_benchmark/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/contrib/pytest_benchmark/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/pytest_benchmark/__pycache__/constants.cpython-311.pyc,,
+ddtrace/contrib/pytest_benchmark/__pycache__/plugin.cpython-311.pyc,,
+ddtrace/contrib/pytest_benchmark/constants.py,sha256=Qldd-8g3tRjpdgVTMMwL4i49_ROoJ3t0VB9eJ4FeiJo,2134
+ddtrace/contrib/pytest_benchmark/plugin.py,sha256=DkAtkQoQps8AOLNC7NzcsTn2heto0ywkGkE5rxVsVXE,1302
+ddtrace/contrib/redis/__init__.py,sha256=eVvzamumY2gjIPmKfiNk9-ZglLg59Aq6KFkhukVQV3o,1981
+ddtrace/contrib/redis/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/redis/__pycache__/asyncio_patch.cpython-311.pyc,,
+ddtrace/contrib/redis/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/redis/asyncio_patch.py,sha256=tK-gFeBzd3ohwNKf_0Ha3xnC_eeI2XNhGFavGKNR6uU,1595
+ddtrace/contrib/redis/patch.py,sha256=OJ53h-fbNY4bBHIpvm-n1pL4-QDdFB5IIi513J4pmWU,6933
+ddtrace/contrib/rediscluster/__init__.py,sha256=vAhdTYKt8Cet0L1GkSZbPsEyepRT1sw1uQ6rlMAMSjk,1837
+ddtrace/contrib/rediscluster/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/rediscluster/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/rediscluster/patch.py,sha256=5dKj2-Qi91yvHtDo1CcUErojELrWaW_yLRSbmQkehBo,4191
+ddtrace/contrib/requests/__init__.py,sha256=hSf34AnfweSRwnPd5vSuyAFGfxgXz7MvSfNdLX85PsA,2280
+ddtrace/contrib/requests/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/requests/__pycache__/connection.cpython-311.pyc,,
+ddtrace/contrib/requests/__pycache__/constants.cpython-311.pyc,,
+ddtrace/contrib/requests/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/requests/__pycache__/session.cpython-311.pyc,,
+ddtrace/contrib/requests/connection.py,sha256=14QyMr_458cXPmQBFqpFn1hJJ6eLqao05_IBU5lvPmA,5288
+ddtrace/contrib/requests/constants.py,sha256=HY71FxxxJKkCVoCNspOczEf9Ji6QcVDTeDCgYyuA0qU,29
+ddtrace/contrib/requests/patch.py,sha256=prHMxIef4MsTO2bAHUWH7E4sMAQ-nwoZJIb5tDg912w,1448
+ddtrace/contrib/requests/session.py,sha256=_vtXsnkadx3J6cJXjDcz39VKPQIzMVLWf0XyB-hOXm8,512
+ddtrace/contrib/rq/__init__.py,sha256=J9hChEpXvRKNvcUsIe4HdXbRTtqGMcnSn5-iSGFlv24,8596
+ddtrace/contrib/rq/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/sanic/__init__.py,sha256=yzXGvBedAy-_llonmMe6mroKNE29J0AmZMbo5Kanw2k,1830
+ddtrace/contrib/sanic/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/sanic/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/sanic/patch.py,sha256=BbFMt18dpIbq3OEnr1W5NHtSo204HERGnZfcx7ZKjGg,9868
+ddtrace/contrib/snowflake/__init__.py,sha256=BD1iYTVozgcwANvNG5keqz4HXNV9ez9_dTUbef7bNko,1916
+ddtrace/contrib/snowflake/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/snowflake/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/snowflake/patch.py,sha256=YhjNOrjyW4s18SaoFpHDrJ6kk_fIpgQ62w_xZDZvcWs,2715
+ddtrace/contrib/sqlalchemy/__init__.py,sha256=RovvKRxQQuWTjEnRAqw4QZ8tKZtpd5kLyOoJDsZoD_0,1159
+ddtrace/contrib/sqlalchemy/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/sqlalchemy/__pycache__/engine.cpython-311.pyc,,
+ddtrace/contrib/sqlalchemy/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/sqlalchemy/engine.py,sha256=bsULXVgNUp7mZmDcDTGSkZbmA2y2DIL-4opqd3z-OVo,5475
+ddtrace/contrib/sqlalchemy/patch.py,sha256=sahiUKP2g9PazD2cCvD4tzowi2EkyqrBtGklcPqTJ_E,798
+ddtrace/contrib/sqlite3/__init__.py,sha256=vP1mIts57RrXXdpdveCnCTWxb54ldQRxBMZ2vy9jODc,1604
+ddtrace/contrib/sqlite3/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/sqlite3/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/sqlite3/patch.py,sha256=UPRCErN3pdWiZaBM2lZSWEfPJt9Xd9PoV11BDVRcA2A,2999
+ddtrace/contrib/starlette/__init__.py,sha256=ohVJF2DVGOi-hWv71b-xUmifIsjrbpQjEZbUDM-i7yc,2317
+ddtrace/contrib/starlette/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/starlette/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/starlette/patch.py,sha256=iOM0W857ZkHcugmEqwtVPirmijq_DzGVFLeCyAdqR5E,7074
+ddtrace/contrib/structlog/__init__.py,sha256=JJpgY9efpqxiV7D1YqtQL_THivsg1vAJDPgde2_rZ78,1721
+ddtrace/contrib/structlog/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/structlog/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/structlog/patch.py,sha256=ql3QPna9bCZVQ-_OloNMafW_JJxQOJ5jWcLdnZbTNhU,3133
+ddtrace/contrib/subprocess/__init__.py,sha256=mzY7mir2dYoaumFAWtOG-qTlnc2dbB1gtNDwmeDR81s,1012
+ddtrace/contrib/subprocess/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/subprocess/__pycache__/constants.cpython-311.pyc,,
+ddtrace/contrib/subprocess/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/subprocess/constants.py,sha256=4f8Nsp5Q7RoYe1jOzh_4o-1gNsZ2mgP4bJgM75ILnwI,585
+ddtrace/contrib/subprocess/patch.py,sha256=bGbFSp1ZSr4d_OEFb3V97yS6RnhDAz-yU0xvl9v4Sd0,14442
+ddtrace/contrib/tornado/__init__.py,sha256=eLRgK_bPQrsB9aMLxEMIDvYplUB4Shoxjx2zFF39Kp4,4605
+ddtrace/contrib/tornado/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/tornado/__pycache__/application.cpython-311.pyc,,
+ddtrace/contrib/tornado/__pycache__/constants.cpython-311.pyc,,
+ddtrace/contrib/tornado/__pycache__/decorators.cpython-311.pyc,,
+ddtrace/contrib/tornado/__pycache__/handlers.cpython-311.pyc,,
+ddtrace/contrib/tornado/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/tornado/__pycache__/stack_context.cpython-311.pyc,,
+ddtrace/contrib/tornado/__pycache__/template.cpython-311.pyc,,
+ddtrace/contrib/tornado/application.py,sha256=Ivn9_sQC6mM9bhkbyx9yBeUSf_on90Jchwwxfd_Rvlc,1843
+ddtrace/contrib/tornado/constants.py,sha256=PsZUkpzC0uUfshu3QrhvPMj5b669K9w1pXGHgitaK00,205
+ddtrace/contrib/tornado/decorators.py,sha256=qc8t-_uSVKmUERUQOLuNb7j1C5Q1TyGeoy-RK1PNaYA,3368
+ddtrace/contrib/tornado/handlers.py,sha256=jspe5TDJ4vP_p1S6_r4sa1gTSdgsntAQmpW3MbR1CQo,5569
+ddtrace/contrib/tornado/patch.py,sha256=7KGmMwD2AS-4BcBbYa6uiekZZ4_sD0jhxqAkl4-m7U0,1998
+ddtrace/contrib/tornado/stack_context.py,sha256=eKJjaSAlLu78-OvFK2tt8CXAskIyL2LBcaetmLI8tyo,6067
+ddtrace/contrib/tornado/template.py,sha256=7fLfkYziu9djuCl8F0hw_qC3fUzLoLFKkMuSvDXgGSs,1166
+ddtrace/contrib/trace_utils.py,sha256=urNH62swJDJca9ivwGqXmFuRsvAb3_mrXhVLFRNP3Ak,25317
+ddtrace/contrib/trace_utils_async.py,sha256=1b0AQ-_qk97P3Y-mLYLMzl2yo32SNjkhaKiSJ0WdmUw,1059
+ddtrace/contrib/trace_utils_redis.py,sha256=Zr_SiSVXUge57Nb5huQ0o3htyHLdP3zOIRIl2heW9aU,7256
+ddtrace/contrib/unittest/__init__.py,sha256=ItDIm3eH7jHfSeDZ8c4yguf8_WtFt2flkiJ2gVHhO9c,1250
+ddtrace/contrib/unittest/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/unittest/__pycache__/constants.cpython-311.pyc,,
+ddtrace/contrib/unittest/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/unittest/constants.py,sha256=FqTAdWU1ZqPhTMuZ19Dx1a0XW6FP1jwHylw9ZhRzwPo,246
+ddtrace/contrib/unittest/patch.py,sha256=3nQGZfz-FadYwTj6Tv8MlH4DfbKPiN6X84OIP6dnAWw,35366
+ddtrace/contrib/urllib3/__init__.py,sha256=AMtocBCa7f3H5MgtZTjg1YFX5Wrw2R9fYF6IeE-8ypg,1597
+ddtrace/contrib/urllib3/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/urllib3/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/urllib3/patch.py,sha256=Y8N4R0GrMuqfSoB9uq3NopEBEslKrfhzMa41hzNS7u0,5290
+ddtrace/contrib/vertica/__init__.py,sha256=H1U-TD0OD6KmZYitew-MvYuMowAGzkXtnV9-chGZW0I,1420
+ddtrace/contrib/vertica/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/vertica/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/vertica/patch.py,sha256=tKpfdxR4eQ9TZfIZ_yw7KtGQiZe6JOExcEieoiQZqGA,8760
+ddtrace/contrib/wsgi/__init__.py,sha256=1-5ThMEpTx-Jt55Ni1DM4M06j8wTf0zuBB9NjTMd_gQ,868
+ddtrace/contrib/wsgi/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/wsgi/__pycache__/wsgi.cpython-311.pyc,,
+ddtrace/contrib/wsgi/wsgi.py,sha256=Q8LFKYBoCATkvMQdXNsYdZ5JPof1-iSDrR4T44IhiPU,10523
+ddtrace/contrib/yaaredis/__init__.py,sha256=UiSo4UQXYrsP3Pcc9Yo4IuwHKwD6myL2Eh2BF160kug,2060
+ddtrace/contrib/yaaredis/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/contrib/yaaredis/__pycache__/patch.cpython-311.pyc,,
+ddtrace/contrib/yaaredis/patch.py,sha256=DJFQivzCYqO7QdjzNQ8CP2N9qOSF0DqC6h8CStU0_bQ,2787
+ddtrace/data_streams.py,sha256=y5qpNXHaOgd2wLl5nVImreZW0nfHeLtfQS-osStQZh0,1663
+ddtrace/debugging/__init__.py,sha256=6q2f6Z5_A_PhEu8k70s4mTFy1DWlGAWx3zWEi2x9DVY,806
+ddtrace/debugging/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/debugging/__pycache__/_async.cpython-311.pyc,,
+ddtrace/debugging/__pycache__/_config.cpython-311.pyc,,
+ddtrace/debugging/__pycache__/_debugger.cpython-311.pyc,,
+ddtrace/debugging/__pycache__/_encoding.cpython-311.pyc,,
+ddtrace/debugging/__pycache__/_expressions.cpython-311.pyc,,
+ddtrace/debugging/__pycache__/_metrics.cpython-311.pyc,,
+ddtrace/debugging/__pycache__/_redaction.cpython-311.pyc,,
+ddtrace/debugging/__pycache__/_safety.cpython-311.pyc,,
+ddtrace/debugging/__pycache__/_uploader.cpython-311.pyc,,
+ddtrace/debugging/_async.py,sha256=oAs-UL8NMhrI6GhgecrfRF2fRKG9eko1AMotJ_ZvlvM,758
+ddtrace/debugging/_config.py,sha256=OdbGrspuwbOl9x7u3LbVK_U0u88IfICUIVigJHxqcYY,246
+ddtrace/debugging/_debugger.py,sha256=XEq2UcOqptVL2-CdwNJlLCgDtT4lsSNpEp4HrjU0JS4,31180
+ddtrace/debugging/_encoding.py,sha256=K5ZH75NHm7Qq0ddwG2uKZKqdAPQmqLxNW_kzW51GADo,9307
+ddtrace/debugging/_exception/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/debugging/_exception/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/debugging/_exception/__pycache__/auto_instrument.cpython-311.pyc,,
+ddtrace/debugging/_exception/auto_instrument.py,sha256=dKHEb1JCwJ1c1wd5ZIR3EFkSYuLLSM2sCBdR007t1wc,7011
+ddtrace/debugging/_expressions.py,sha256=T0MLsnLqsggS12C0M36lpnIRp4Gb5MApfsOZ2bcTSFI,13171
+ddtrace/debugging/_function/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/debugging/_function/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/debugging/_function/__pycache__/discovery.cpython-311.pyc,,
+ddtrace/debugging/_function/__pycache__/store.cpython-311.pyc,,
+ddtrace/debugging/_function/discovery.py,sha256=EsAUjo3WIgL9wOgHjnSyExn4opKM_S-w6O05YoYOWMs,8740
+ddtrace/debugging/_function/store.py,sha256=3EKl9AxFDWwI308InM5H-tmgX61AtgagWM08srKJFsU,4340
+ddtrace/debugging/_metrics.py,sha256=Q8MKihuNSIxLLhYy0s3jJs6r7S8owsao7eQBhxC4LZs,223
+ddtrace/debugging/_probe/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/debugging/_probe/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/debugging/_probe/__pycache__/model.cpython-311.pyc,,
+ddtrace/debugging/_probe/__pycache__/registry.cpython-311.pyc,,
+ddtrace/debugging/_probe/__pycache__/remoteconfig.cpython-311.pyc,,
+ddtrace/debugging/_probe/__pycache__/status.cpython-311.pyc,,
+ddtrace/debugging/_probe/model.py,sha256=GOKZjdNH3XD4uPpE9GK6io0XgH043ajqZlG1FoRQ02Y,7585
+ddtrace/debugging/_probe/registry.py,sha256=GB8xaPo-Oqis411ThJQ6hjNrVUeYnX181xM3TZOo7QE,6877
+ddtrace/debugging/_probe/remoteconfig.py,sha256=CoPjHvVL3FR9_FBvFUxBw8ntA68kHQP4FXGao7jD1m0,13092
+ddtrace/debugging/_probe/status.py,sha256=O8vXceuim7R1BEKLctJ9d1eW28qx_2JvvRa2-IbQBfs,5317
+ddtrace/debugging/_redaction.py,sha256=X4qtpuhAfSNvBMJISO8hdS0bNB2T280je8_lDTlLFxs,4182
+ddtrace/debugging/_safety.py,sha256=aBd-fyAs7wqf30LiCfmFG5CzR-xbXb7osnh2nf8W61A,2152
+ddtrace/debugging/_signal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/debugging/_signal/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/debugging/_signal/__pycache__/collector.cpython-311.pyc,,
+ddtrace/debugging/_signal/__pycache__/metric_sample.cpython-311.pyc,,
+ddtrace/debugging/_signal/__pycache__/model.cpython-311.pyc,,
+ddtrace/debugging/_signal/__pycache__/snapshot.cpython-311.pyc,,
+ddtrace/debugging/_signal/__pycache__/tracing.cpython-311.pyc,,
+ddtrace/debugging/_signal/__pycache__/utils.cpython-311.pyc,,
+ddtrace/debugging/_signal/collector.py,sha256=6esIpqf4jkRVMX32Gi6jQXre2VaspOLV2orByKxjH8w,4323
+ddtrace/debugging/_signal/metric_sample.py,sha256=Pj49TDe3iqFv1eNM-jsFoFVGtc0GaYjor7-1ZkGmuXA,2773
+ddtrace/debugging/_signal/model.py,sha256=7D5rOwtxZtIjB-qR__hZs7JKIJsL9hehwE5jX39Kvrk,4942
+ddtrace/debugging/_signal/snapshot.py,sha256=IRl3zyqv2-HCPzQSV4u7WRc6x_DI61SFGJT5uWI5ags,8547
+ddtrace/debugging/_signal/tracing.py,sha256=rHXS4w6V_AGGbXIJzmSUR8BMWjfqxl3wTA8xaXwRlic,5677
+ddtrace/debugging/_signal/utils.py,sha256=48H65Um7jj7qfK0INw8RvTpbfPMz_yfaK7y2BG3cCXI,9759
+ddtrace/debugging/_uploader.py,sha256=tadYtxcBizkxsXL37FT1Ft5fb8QQYrKao8VIf9COyxE,3546
+ddtrace/ext/__init__.py,sha256=0Ax8C94xQ7C80xKs2_xYzP6RVESKZH2DB3rM1MjyLKQ,473
+ddtrace/ext/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/ext/__pycache__/aws.cpython-311.pyc,,
+ddtrace/ext/__pycache__/cassandra.cpython-311.pyc,,
+ddtrace/ext/__pycache__/ci.cpython-311.pyc,,
+ddtrace/ext/__pycache__/consul.cpython-311.pyc,,
+ddtrace/ext/__pycache__/db.cpython-311.pyc,,
+ddtrace/ext/__pycache__/elasticsearch.cpython-311.pyc,,
+ddtrace/ext/__pycache__/git.cpython-311.pyc,,
+ddtrace/ext/__pycache__/http.cpython-311.pyc,,
+ddtrace/ext/__pycache__/kafka.cpython-311.pyc,,
+ddtrace/ext/__pycache__/kombu.cpython-311.pyc,,
+ddtrace/ext/__pycache__/memcached.cpython-311.pyc,,
+ddtrace/ext/__pycache__/mongo.cpython-311.pyc,,
+ddtrace/ext/__pycache__/net.cpython-311.pyc,,
+ddtrace/ext/__pycache__/redis.cpython-311.pyc,,
+ddtrace/ext/__pycache__/sql.cpython-311.pyc,,
+ddtrace/ext/__pycache__/test.cpython-311.pyc,,
+ddtrace/ext/__pycache__/user.cpython-311.pyc,,
+ddtrace/ext/aws.py,sha256=0t5-ftzdd4eCkpG4udnbvONnSe648nfsuH_D914oGCg,3538
+ddtrace/ext/cassandra.py,sha256=nT1jltcvXRSDddqWHWhFD3cxInDhhKd0ouGghNV3zgQ,191
+ddtrace/ext/ci.py,sha256=4VuQNSLToF0QSM8zt-GrX0BMjy6qxVIyODXT-I-IyhE,21950
+ddtrace/ext/consul.py,sha256=6Vq9cGCCOimwFMyQRR1rx0mnEWYKWn3JSYVihLnkPhk,76
+ddtrace/ext/db.py,sha256=dW0pOWQYQlAri4goGdlVIcDzmx6GjpURPXAMEoOvzt4,325
+ddtrace/ext/elasticsearch.py,sha256=1NbDiLzJYpzs0KVC7W2q8MsHkcayjFjZwasFP6mRSkI,211
+ddtrace/ext/git.py,sha256=BibJ42uXDj9Wnkg2JchffvUgJRiM-OOytHvQS-1GGaQ,14225
+ddtrace/ext/http.py,sha256=TegstAKHfmYMdxQBDdTKd8wHGGNK44xWq2JlECo7evg,447
+ddtrace/ext/kafka.py,sha256=Fwb5SfjCSAS5Q1cunqzTxpGD8MgwskrTJr_ofTYedmg,349
+ddtrace/ext/kombu.py,sha256=JqzezXNOH_wlcrQzNAOeM87v4CnJPFXrNeK9hX-UDTk,228
+ddtrace/ext/memcached.py,sha256=9OhpONkQtGpcQBAcX6AxdMmvWk7J7A8aJfknfemGZ4M,98
+ddtrace/ext/mongo.py,sha256=xLojJ7vdDhEpYaFLcYASCC2iUM0pPcZ83o-7JSgnmgc,96
+ddtrace/ext/net.py,sha256=jkuYQt8qHAceRflb0QNud6ramJXrrLUoSYgrf-7dOo0,215
+ddtrace/ext/redis.py,sha256=MCy19md0SCMkAXohiBxHmeye3f-3WXwbqpfyp9W9DoA,296
+ddtrace/ext/sql.py,sha256=PWbg2AmP3SFey-knezYm71HbFh76KV9K_JKhi0HaRN8,2297
+ddtrace/ext/test.py,sha256=Jlg8nZCsz2PKQWzEM7ZAAMeBypS9yn-bqW4XAlErp0s,2082
+ddtrace/ext/user.py,sha256=v3K0RLPK3xmQFS-jAq2oYclb09TKEsFc61fAJyZQ3DA,149
+ddtrace/filters.py,sha256=YfvhPWtyirFGQibO9Zp7q3GRci9T17CEPqJMbdm3H8A,2602
+ddtrace/internal/README.md,sha256=1uhyNSgCgy-mhrKDfkBxHD76hPffLFZFRGH7ma6JRdk,323
+ddtrace/internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/internal/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/__pycache__/agent.cpython-311.pyc,,
+ddtrace/internal/__pycache__/assembly.cpython-311.pyc,,
+ddtrace/internal/__pycache__/atexit.cpython-311.pyc,,
+ddtrace/internal/__pycache__/codeowners.cpython-311.pyc,,
+ddtrace/internal/__pycache__/compat.cpython-311.pyc,,
+ddtrace/internal/__pycache__/constants.cpython-311.pyc,,
+ddtrace/internal/__pycache__/debug.cpython-311.pyc,,
+ddtrace/internal/__pycache__/dogstatsd.cpython-311.pyc,,
+ddtrace/internal/__pycache__/encoding.cpython-311.pyc,,
+ddtrace/internal/__pycache__/forksafe.cpython-311.pyc,,
+ddtrace/internal/__pycache__/gitmetadata.cpython-311.pyc,,
+ddtrace/internal/__pycache__/glob_matching.cpython-311.pyc,,
+ddtrace/internal/__pycache__/hostname.cpython-311.pyc,,
+ddtrace/internal/__pycache__/http.cpython-311.pyc,,
+ddtrace/internal/__pycache__/injection.cpython-311.pyc,,
+ddtrace/internal/__pycache__/log_writer.cpython-311.pyc,,
+ddtrace/internal/__pycache__/logger.cpython-311.pyc,,
+ddtrace/internal/__pycache__/metrics.cpython-311.pyc,,
+ddtrace/internal/__pycache__/module.cpython-311.pyc,,
+ddtrace/internal/__pycache__/packages.cpython-311.pyc,,
+ddtrace/internal/__pycache__/periodic.cpython-311.pyc,,
+ddtrace/internal/__pycache__/rate_limiter.cpython-311.pyc,,
+ddtrace/internal/__pycache__/safety.cpython-311.pyc,,
+ddtrace/internal/__pycache__/sampling.cpython-311.pyc,,
+ddtrace/internal/__pycache__/service.cpython-311.pyc,,
+ddtrace/internal/__pycache__/sma.cpython-311.pyc,,
+ddtrace/internal/__pycache__/tracemethods.cpython-311.pyc,,
+ddtrace/internal/__pycache__/uds.cpython-311.pyc,,
+ddtrace/internal/__pycache__/uwsgi.cpython-311.pyc,,
+ddtrace/internal/_encoding.c,sha256=A7JX7UkPMykG0eNxozk2RM2JVtv-Zda0zvJT7x9FYq0,1637460
+ddtrace/internal/_encoding.cpython-311-x86_64-linux-gnu.so,sha256=k8xvHXdyGo8wTGqkavFJtqdiGV9j-7zMWzojzTg2lgs,357568
+ddtrace/internal/_encoding.pyi,sha256=JNUuxuoZz2D3nKo5lGDUsxA9u1fS6MSffYD8he_hnz8,1063
+ddtrace/internal/_encoding.pyx,sha256=8J2uckAmCWnfzxmqdHx6Lh5mJItwbmGdmM0E29BASGA,36236
+ddtrace/internal/_rand.c,sha256=4UADi5JunmqpTSh5HHjAHiyJLz9mk3ZyYygHrf8XYPQ,296384
+ddtrace/internal/_rand.cpython-311-x86_64-linux-gnu.so,sha256=tTEd4T3xjRJAnmmH9Px71Pqo9olFkujGslzCFyIs38I,64144
+ddtrace/internal/_rand.pyi,sha256=VQEVI10a3YVrb7Bg5adUaUkI0J70QWdt6E0Bt5dH_MY,127
+ddtrace/internal/_rand.pyx,sha256=BXiqrnuT7h3wRsrWN4jhC1bFxfwyHMrDq4Bbxs1yZ5w,5107
+ddtrace/internal/_stdint.h,sha256=DY0jy2g-deZsm-PfT3vWeiGgMoldI8AZX-nQ2qnLSVo,109
+ddtrace/internal/_tagset.c,sha256=xf0v8CcBZcRVul9rgwB7l4y4Qgu4S5E6LNgphbTyox8,434000
+ddtrace/internal/_tagset.cpython-311-x86_64-linux-gnu.so,sha256=GnY8j-hJdHDzv8XllxAb6--gvDcn0RsIEPnuGKXKr9s,118400
+ddtrace/internal/_tagset.pyi,sha256=J-05rpWA3TH8ex_nckcYq8mY0yO1enHCKsm8f_XmL7Y,624
+ddtrace/internal/_tagset.pyx,sha256=6UeR66ZK3PbiCWb_-Sj0srNuxOy8HQmOfztaTDLJGD4,7483
+ddtrace/internal/_utils.h,sha256=eX_-G0pyK96mZ60uiUPeLsNqX5r_9PfMmy9y8fq3hSs,191
+ddtrace/internal/_utils.pxd,sha256=XmIW76IjCJ-yipWMi97KuUYu3LRU_LjHN4YxuEN-W4g,77
+ddtrace/internal/agent.py,sha256=pbfCcLDqpFUbbxtRgGJORAf0SzyiO83s4Z0bxELvvao,3304
+ddtrace/internal/assembly.py,sha256=mcRrXrHGk_caBpsGugJ-pj3EgY4M67AMCPFGjenP7Vw,9198
+ddtrace/internal/atexit.py,sha256=EBzMjEW4_gZ32QkrSQjkDmt9nY_MMspYaUFUVrRVtGA,2607
+ddtrace/internal/buff_converter.h,sha256=lQM0fFqpHtXpaprDrIE9bDXqIlCFbpt_h6nzJS1eRjk,658
+ddtrace/internal/ci_visibility/__init__.py,sha256=C5YraQmpFIH2KMYneZNA2FsBOFTtUKWtCN_xYi-5ODQ,353
+ddtrace/internal/ci_visibility/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/ci_visibility/__pycache__/constants.cpython-311.pyc,,
+ddtrace/internal/ci_visibility/__pycache__/coverage.cpython-311.pyc,,
+ddtrace/internal/ci_visibility/__pycache__/encoder.cpython-311.pyc,,
+ddtrace/internal/ci_visibility/__pycache__/filters.cpython-311.pyc,,
+ddtrace/internal/ci_visibility/__pycache__/git_client.cpython-311.pyc,,
+ddtrace/internal/ci_visibility/__pycache__/recorder.cpython-311.pyc,,
+ddtrace/internal/ci_visibility/__pycache__/utils.cpython-311.pyc,,
+ddtrace/internal/ci_visibility/__pycache__/writer.cpython-311.pyc,,
+ddtrace/internal/ci_visibility/constants.py,sha256=YjMp_YtDGMr776Hi00vexvde0Mep_EJ1GfvPL36Re-s,1967
+ddtrace/internal/ci_visibility/coverage.py,sha256=syHQAPVkWaS5jn_8SSDFcYSmP5PFVz64MpOFSjkJjHQ,5744
+ddtrace/internal/ci_visibility/encoder.py,sha256=9z2RQIen80ly3uqJAUxnHmpKElwtOa_WTsWDzUHtJbo,8005
+ddtrace/internal/ci_visibility/filters.py,sha256=1ayfP0sPlKGweR51MrNKY78b8hoqniGZpxLybYuEXQ4,1211
+ddtrace/internal/ci_visibility/git_client.py,sha256=8gCcNM5XkD6FlCcxH5JF1f14G94pdtHl2pTxYB294RU,21375
+ddtrace/internal/ci_visibility/recorder.py,sha256=kfdA8pDlHf3nONOxWRodKeE8jbpS3a8zZAmouaE4jAk,25338
+ddtrace/internal/ci_visibility/telemetry/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/internal/ci_visibility/telemetry/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/ci_visibility/telemetry/__pycache__/constants.cpython-311.pyc,,
+ddtrace/internal/ci_visibility/telemetry/__pycache__/git.cpython-311.pyc,,
+ddtrace/internal/ci_visibility/telemetry/__pycache__/utils.cpython-311.pyc,,
+ddtrace/internal/ci_visibility/telemetry/constants.py,sha256=NKfiMnsYyUE2UuFGy7GG6jCd1Ur3PiLocjAWh_l8HlE,1384
+ddtrace/internal/ci_visibility/telemetry/git.py,sha256=ic7wWqiPGNiDe9QhpXFhZwUXj0KNsVF_W1D6j8VkP4A,4137
+ddtrace/internal/ci_visibility/telemetry/utils.py,sha256=DEVzVw2llci3NqBRam5lbixKWsiL94l5MUBXJ7tZviY,491
+ddtrace/internal/ci_visibility/utils.py,sha256=wbefZtB9Z_YD3cyTGA4mbWnvHSbBVyTj3DP49DKOyjg,5257
+ddtrace/internal/ci_visibility/writer.py,sha256=1YDHO_UEOh5zXlAb4FmbFZMqhY4IZmENc7Hdm0wU3do,5401
+ddtrace/internal/codeowners.py,sha256=9uses9up7Jq24yGi_qulebiwn2M9Cfx6riQgS5E8pvA,7361
+ddtrace/internal/compat.py,sha256=nRCBVmjlDgxO5ea7bntNuE39heegORK6jph2I5c2dXE,13558
+ddtrace/internal/constants.py,sha256=lBoJ_oZQPv6Vs6izrvOjeZ4XJ5kY3f8ZbaxlBjoL7qo,4521
+ddtrace/internal/core/__init__.py,sha256=8CmrXGXXXHtCJS8LiS83jM1hoj759nai4JQZPgxrXEU,12372
+ddtrace/internal/core/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/core/__pycache__/event_hub.cpython-311.pyc,,
+ddtrace/internal/core/event_hub.py,sha256=fIXb8T_h1VBOoF0LAmWe8AQeNmnt5x8GB0Ndj9T7PvI,3530
+ddtrace/internal/datadog/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/internal/datadog/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/datadog/profiling/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/internal/datadog/profiling/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/datadog/profiling/__pycache__/ddup.cpython-311.pyc,,
+ddtrace/internal/datadog/profiling/__pycache__/utils.cpython-311.pyc,,
+ddtrace/internal/datadog/profiling/_ddup.cpp,sha256=OBhXOKGg_eigy87R85OhkILx1Z6D-SQS7szgV_IlpUE,555023
+ddtrace/internal/datadog/profiling/_ddup.cpython-311-x86_64-linux-gnu.so,sha256=TMkHk1LIW2XuFhFDMcsuOaOOqPLZr92s94xOXjidKv0,3667048
+ddtrace/internal/datadog/profiling/_ddup.pyi,sha256=5BDwwZ0wFSK1OA6jyt_3N-Xq62FP0gP0l4yEP0J_dGo,1181
+ddtrace/internal/datadog/profiling/_ddup.pyx,sha256=oBPnQND4FKaFK2_6H-2KCbcxVpZEMKC1kCkEsY6iY5c,6914
+ddtrace/internal/datadog/profiling/ddup.py,sha256=r-5tknAytd52G7r-zhy6VaJFv4yy_FwrEX1hO8XWn4o,2383
+ddtrace/internal/datadog/profiling/include/exporter.hpp,sha256=a9O2sqkmbcKe6L0doPcwM3hLzx_D0PABi81Jaz775N4,8438
+ddtrace/internal/datadog/profiling/include/interface.hpp,sha256=2fIXtkft9g-gO91qScunLEPshfMzJFoiNi7Dr45NJjQ,2117
+ddtrace/internal/datadog/profiling/src/exporter.cpp,sha256=v0_wQ0JEDl0esx92ySC0WSf6kh1yDWb_4GNuIOYyN20,19149
+ddtrace/internal/datadog/profiling/src/interface.cpp,sha256=I05bqNo4gaCN_kSKhHCd1p5b77uUzTa7pNaH7BiSv-s,6736
+ddtrace/internal/datadog/profiling/utils.py,sha256=ibJDCIZyJAK2qiUFIQzJkHSj-hSJMoWM18uJy7iOYBY,819
+ddtrace/internal/datastreams/__init__.py,sha256=Fb7HvVbejdmBFjPMQyPmdGY1Pho2kRozukRBKD5KE9k,817
+ddtrace/internal/datastreams/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/datastreams/__pycache__/botocore.cpython-311.pyc,,
+ddtrace/internal/datastreams/__pycache__/encoding.cpython-311.pyc,,
+ddtrace/internal/datastreams/__pycache__/fnv.cpython-311.pyc,,
+ddtrace/internal/datastreams/__pycache__/kafka.cpython-311.pyc,,
+ddtrace/internal/datastreams/__pycache__/kombu.cpython-311.pyc,,
+ddtrace/internal/datastreams/__pycache__/processor.cpython-311.pyc,,
+ddtrace/internal/datastreams/__pycache__/utils.cpython-311.pyc,,
+ddtrace/internal/datastreams/botocore.py,sha256=Q1iyGAwVSstVF-Kprl8O4KRbPzThxxL9YPOX5dhsoSo,6761
+ddtrace/internal/datastreams/encoding.py,sha256=SSLbXfaLgxR-jVPx6NnPYfbHbAafqEtHna66VEhzrPA,969
+ddtrace/internal/datastreams/fnv.py,sha256=EfOCo9miVh3ohlsQpqPMa7PvFeG124cwSv1aaP4R4_c,779
+ddtrace/internal/datastreams/kafka.py,sha256=fvMTF7t0TnEjZkDoZtCnVGKe5phZuGce7WR73-bmfbE,4737
+ddtrace/internal/datastreams/kombu.py,sha256=kgFGfXfbp6NSUPRuQaT7g2pJY0xrgLQT44NZl6-HHn0,2050
+ddtrace/internal/datastreams/processor.py,sha256=1ycLrFk-fsHqR__w6TgemUY9QVBC4dkZlBfrT8sNTio,18767
+ddtrace/internal/datastreams/utils.py,sha256=0AU8Wwly06bjP5lSCixR2SZpgTvi7sybYItEsjQk0cM,612
+ddtrace/internal/debug.py,sha256=FeWWWKI7g4xHlMbngqNaYFqsU6c4lE2Dxo18Fpx067A,10650
+ddtrace/internal/dogstatsd.py,sha256=efN0ySeNqX07_oFtQpJiKPmX6QDFHWWUrF2ywXV5zSc,1038
+ddtrace/internal/encoding.py,sha256=h5aSO2sNajjq5XeR-XzQj69lcrPmS_zCZDq85otwHRk,4632
+ddtrace/internal/forksafe.py,sha256=jwvxSkHG5ZfEml47mtIXhxOcqMcQ0HDkteTWriQpQrI,3896
+ddtrace/internal/gitmetadata.py,sha256=pXd1Mv-olBtaCt93LVYaN9Oy0h9i5_rTt4M1H1CxNmU,5197
+ddtrace/internal/glob_matching.py,sha256=BRhphDAbDGKvKqfMd-Z2m84ByljJbJX1nDhz-N_UBiI,1625
+ddtrace/internal/hostname.py,sha256=JxcUHAgbWLtnOAjghUb0MkQ4gHiFka15PwsDSDYJexk,285
+ddtrace/internal/http.py,sha256=F8XLr90sCfPG_KuAPwppsOdJoSwYzWjJB97Yaon3egw,1103
+ddtrace/internal/injection.py,sha256=FqCQipq4LnE_terTZomcnFDdhtnKHmc6Jl4DJY3P_k0,6546
+ddtrace/internal/llmobs/__init__.py,sha256=yVpxe-9lyZ-SOjbgkP2kvsOOmXTwUMna6QpQHYimBOU,62
+ddtrace/internal/llmobs/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/llmobs/__pycache__/writer.cpython-311.pyc,,
+ddtrace/internal/llmobs/integrations/__init__.py,sha256=pptHo2prPaGFp2wFjWJmd-s3s0Zt5nbsTd-5nWOXBk8,261
+ddtrace/internal/llmobs/integrations/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/llmobs/integrations/__pycache__/base.cpython-311.pyc,,
+ddtrace/internal/llmobs/integrations/__pycache__/bedrock.cpython-311.pyc,,
+ddtrace/internal/llmobs/integrations/__pycache__/langchain.cpython-311.pyc,,
+ddtrace/internal/llmobs/integrations/__pycache__/openai.cpython-311.pyc,,
+ddtrace/internal/llmobs/integrations/base.py,sha256=_S42QhGOAlBrSNx2u5RBfJx_UpO0BcHUWGJKx4nToIs,10029
+ddtrace/internal/llmobs/integrations/bedrock.py,sha256=MCSeBcazo9Z_kSeao_Ifn9DeaK1Iq7TUOMpY1F6HOSM,2997
+ddtrace/internal/llmobs/integrations/langchain.py,sha256=tZ7UaWCLU7ZFqRxPoDCxdMcSA83TJRjHkOi63pM6VnA,3091
+ddtrace/internal/llmobs/integrations/openai.py,sha256=0kzXxufGC0pZ1N-a1FQ0uQyNaGZQirSFcZumxkmxzD4,10146
+ddtrace/internal/llmobs/writer.py,sha256=w4Cvh8BfI7wiTgzl8dvU_fORzKddirjaXwnf_opX5Co,4858
+ddtrace/internal/log_writer.py,sha256=rp2JATtkdLWmINqPd_PkUT58qGH5v5BROrMhiLNRmtA,3675
+ddtrace/internal/logger.py,sha256=fpAztptlezDIA80oyoZ-N1ZMZjoUj_nLgRk8m9dA9FM,7597
+ddtrace/internal/metrics.py,sha256=qpPVykItcuEs_Y6GGxFyxf10FQPlYYDgIVDW1Fe3QiU,3151
+ddtrace/internal/module.py,sha256=n6LUvLreMXwkjkXUDJdiemGMKv77-8eB0Ump0fnKE-E,19554
+ddtrace/internal/pack.h,sha256=VnFr2XJ8XJHaYh62dJSsrNhmgZPJBgOxSkoIuNgfoQg,2534
+ddtrace/internal/pack_template.h,sha256=dIcX-OWgFLwJBeVRxKmShwhs4nvGfYUrqDewJAFyvjw,39017
+ddtrace/internal/packages.py,sha256=Mvl9Grw00vXEXFdetAYmEeYn_qH025XDmfDiuvavLEQ,4343
+ddtrace/internal/periodic.py,sha256=BYrTDM5fA8t7CPQcmtlkfnCMfT41ecvuUk2cC0mi1wY,5180
+ddtrace/internal/processor/__init__.py,sha256=P6T_QmsAqT9rtTOOkgtDGQtzMmLmhR_uI9kGpclZrJE,2342
+ddtrace/internal/processor/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/processor/__pycache__/endpoint_call_counter.cpython-311.pyc,,
+ddtrace/internal/processor/__pycache__/stats.cpython-311.pyc,,
+ddtrace/internal/processor/__pycache__/trace.cpython-311.pyc,,
+ddtrace/internal/processor/endpoint_call_counter.py,sha256=pIAEWYdt0CERXV-9SBPlEqTfA7uyF_8oE1FoYw4Zd8E,1394
+ddtrace/internal/processor/stats.py,sha256=SP3aWerJD-47MeUFG7PgxU8ZIpGwJRvgdLwUBUgAU-4,9324
+ddtrace/internal/processor/trace.py,sha256=xfJDag2Bbv0v4KY9KCj7WK41X0USCIYvvaXKJGcQ2a0,15789
+ddtrace/internal/rate_limiter.py,sha256=Da2oKLfGF7RD-NvMBm3WzTMEETqha5ANP1fEqeaELHw,8974
+ddtrace/internal/remoteconfig/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/internal/remoteconfig/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/remoteconfig/__pycache__/_connectors.cpython-311.pyc,,
+ddtrace/internal/remoteconfig/__pycache__/_publishers.cpython-311.pyc,,
+ddtrace/internal/remoteconfig/__pycache__/_pubsub.cpython-311.pyc,,
+ddtrace/internal/remoteconfig/__pycache__/_subscribers.cpython-311.pyc,,
+ddtrace/internal/remoteconfig/__pycache__/client.cpython-311.pyc,,
+ddtrace/internal/remoteconfig/__pycache__/constants.cpython-311.pyc,,
+ddtrace/internal/remoteconfig/__pycache__/utils.cpython-311.pyc,,
+ddtrace/internal/remoteconfig/__pycache__/worker.cpython-311.pyc,,
+ddtrace/internal/remoteconfig/_connectors.py,sha256=JorVJOVB6Z7_IzZmj_ILq_UYwX4PUCdXG3Z5cOR5CXU,3386
+ddtrace/internal/remoteconfig/_publishers.py,sha256=E2wv4iRGD16Ky9M-d3Tx5iFXMkl781QeamPiR0uXD40,5479
+ddtrace/internal/remoteconfig/_pubsub.py,sha256=R29hzWRueVHSLrhfzqrPWZDIPqqMAkhNgLdlL3plUW4,5456
+ddtrace/internal/remoteconfig/_subscribers.py,sha256=kMWhI_qN41Y4HGou4y3oFGH92sj0PqV4g8n_RAQ9ceM,2212
+ddtrace/internal/remoteconfig/client.py,sha256=uG9HoFfjq45gu3I3uwnH1Umt89-Hbpb_CZMtyOmzvFU,22235
+ddtrace/internal/remoteconfig/constants.py,sha256=cUe9H0q6O1wMYA4fyM54CbfgtqAmTJDeRGp6qSN9vyU,83
+ddtrace/internal/remoteconfig/utils.py,sha256=x6lFRbYU9baAU4wpD8EApw8ZLTk5eo9HIur9oMYh010,132
+ddtrace/internal/remoteconfig/worker.py,sha256=LzVerzeL6N3lU1qhJHvPxJZkjjZNDt3-d8ltLb3U554,6498
+ddtrace/internal/runtime/__init__.py,sha256=mtD3VXezLQmvYO1gpGVwPVDQjSIfh27fnCI-wkq45_Y,1049
+ddtrace/internal/runtime/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/runtime/__pycache__/collector.cpython-311.pyc,,
+ddtrace/internal/runtime/__pycache__/constants.cpython-311.pyc,,
+ddtrace/internal/runtime/__pycache__/container.cpython-311.pyc,,
+ddtrace/internal/runtime/__pycache__/metric_collectors.cpython-311.pyc,,
+ddtrace/internal/runtime/__pycache__/runtime_metrics.cpython-311.pyc,,
+ddtrace/internal/runtime/__pycache__/tag_collectors.cpython-311.pyc,,
+ddtrace/internal/runtime/collector.py,sha256=ZqeQ2lVLgMXrmisQPE4Nm-g0fbjfdVP1vsDeEMICzDM,3117
+ddtrace/internal/runtime/constants.py,sha256=tm1iOlpgtGM0a0uXubjnoDWUCMqB1DzjoNTFm0Z5ZaY,1036
+ddtrace/internal/runtime/container.py,sha256=fCd_Ge1SB0chqPaQptciMs-RvrCd2dUnX4qxC1q6O90,3838
+ddtrace/internal/runtime/metric_collectors.py,sha256=YxTwAQVJJlnjp543RMLGN9vxK202q6Zz3EXERNOx-VQ,2930
+ddtrace/internal/runtime/runtime_metrics.py,sha256=Bouu18-AyB6DEef-qzTK1y3RNY6wQPnsyiVJSrs05J8,5738
+ddtrace/internal/runtime/tag_collectors.py,sha256=7syAU56F5Y8snWyY3vDxo4fxswysiH11Tu7_59nHNR4,2338
+ddtrace/internal/safety.py,sha256=R9SVrTeahuaPcpKUvCuXVr9DB5x9MipRCqGfa_ydj6c,4099
+ddtrace/internal/sampling.py,sha256=rs5MVlbcuiZ29YSXthjrTOIf9WLdrVPlUzNTuIG0qBs,10577
+ddtrace/internal/schema/__init__.py,sha256=X8UqZKBuI8aRpQdJ6JjynG0oqMjKay78P5SCOkZLbfo,2558
+ddtrace/internal/schema/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/schema/__pycache__/span_attribute_schema.cpython-311.pyc,,
+ddtrace/internal/schema/span_attribute_schema.py,sha256=TH6cLDqAhG9SLUsjJZTW0TdpFc1Un8ghgCX7TRYSOZU,3693
+ddtrace/internal/serverless/__init__.py,sha256=3y6rA68TpzSYSK4T96vrPK3l5jiEIETFYJyaqNifhmo,1791
+ddtrace/internal/serverless/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/serverless/__pycache__/mini_agent.cpython-311.pyc,,
+ddtrace/internal/serverless/mini_agent.py,sha256=NPyY8h-mgtf_HUN1U6lTxIBVVa1lQxKGPXYensUADW4,1507
+ddtrace/internal/service.py,sha256=47l3CMtCbGkVjKQcd41MNKRVyikEa0tm-aFOTghacC0,2957
+ddtrace/internal/sma.py,sha256=CZJTxxoBnFeMN0_xgn-cms3y3BQfMCBTX0Z_H4N1WVY,1601
+ddtrace/internal/sysdep.h,sha256=d2ZWwMOPyhrZctSVL52Ctvrg4jaOGiPjZKOCe8NJLgU,7597
+ddtrace/internal/telemetry/__init__.py,sha256=VpcHpiqW0mZmK2WNc6PhoMn9IHE1DCTzUjgQS6-j1sw,2596
+ddtrace/internal/telemetry/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/telemetry/__pycache__/constants.cpython-311.pyc,,
+ddtrace/internal/telemetry/__pycache__/data.cpython-311.pyc,,
+ddtrace/internal/telemetry/__pycache__/metrics.cpython-311.pyc,,
+ddtrace/internal/telemetry/__pycache__/metrics_namespaces.cpython-311.pyc,,
+ddtrace/internal/telemetry/__pycache__/writer.cpython-311.pyc,,
+ddtrace/internal/telemetry/constants.py,sha256=AaJz5TFGQ0HP6d_FheUPkAxcUI-WtnIf0KRnHTg5Erw,3868
+ddtrace/internal/telemetry/data.py,sha256=mrM-D2nZrDKne7C25g13nIXBcwH-s2poZhG6yXYiIw4,3992
+ddtrace/internal/telemetry/metrics.py,sha256=FZPRofKBa7MYmZ6aIFG_GcCa9RurYKsvK_AEEB7Au9w,5074
+ddtrace/internal/telemetry/metrics_namespaces.py,sha256=ZdofJKRQwo4khl3IM_v6xMxyfYx77gLbCTXVEREy_Ak,2495
+ddtrace/internal/telemetry/writer.py,sha256=m0j9-glfvqquDwbFMUiXsPVppBKyZg9bZBut9w5OUac,33941
+ddtrace/internal/tracemethods.py,sha256=tt9_-ghcywLVpDvs38B-epdvZaJLcftt2bn5ME8sVkk,4229
+ddtrace/internal/uds.py,sha256=Pcf8sWhGzh-MCcEMwlyaDsq7M-OQqH4DryRpHIP1D6c,848
+ddtrace/internal/utils/__init__.py,sha256=pAW2h7Vl9oJGDGEl9mvk_mjTuAwbUxP7Nmluf6UNLpk,2822
+ddtrace/internal/utils/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/utils/__pycache__/attrdict.cpython-311.pyc,,
+ddtrace/internal/utils/__pycache__/cache.cpython-311.pyc,,
+ddtrace/internal/utils/__pycache__/config.cpython-311.pyc,,
+ddtrace/internal/utils/__pycache__/deprecations.cpython-311.pyc,,
+ddtrace/internal/utils/__pycache__/formats.cpython-311.pyc,,
+ddtrace/internal/utils/__pycache__/http.cpython-311.pyc,,
+ddtrace/internal/utils/__pycache__/importlib.cpython-311.pyc,,
+ddtrace/internal/utils/__pycache__/inspection.cpython-311.pyc,,
+ddtrace/internal/utils/__pycache__/retry.cpython-311.pyc,,
+ddtrace/internal/utils/__pycache__/signals.cpython-311.pyc,,
+ddtrace/internal/utils/__pycache__/time.cpython-311.pyc,,
+ddtrace/internal/utils/__pycache__/version.cpython-311.pyc,,
+ddtrace/internal/utils/__pycache__/wrappers.cpython-311.pyc,,
+ddtrace/internal/utils/attrdict.py,sha256=-zaPkaVMhBzwWABgVs1MM6R5At53aCEDzebuWMeeD1M,1181
+ddtrace/internal/utils/cache.py,sha256=hupOr0tIsga9X4hNE6eIKSy40wP7rU_tm8lgzg4C3I4,4189
+ddtrace/internal/utils/config.py,sha256=T-t1Wj8qUOmQ98TBaZNb9o10X9zaLMEYphwNniNLBCw,446
+ddtrace/internal/utils/deprecations.py,sha256=l2a0vrVpTd9lm1DI9KFf8WdD0_YxzNWk9uvUnokefbs,369
+ddtrace/internal/utils/formats.py,sha256=7k6w76X8oGmnsSircnNcyyejrHDjT980LA5xI5N3ycA,5180
+ddtrace/internal/utils/http.py,sha256=wqctaZP4cvwCDGi3sbGQLITFAgpbZX7uYaMVnvlu1Qc,16031
+ddtrace/internal/utils/importlib.py,sha256=nfCdh0p50JqvpBjTI093rMgSzAaJQNWBmXRU05tiuJg,1404
+ddtrace/internal/utils/inspection.py,sha256=C2pLZr6GBHmlk1PAIQrNOdp_NVUTcErjyzJWD29cq3k,4450
+ddtrace/internal/utils/retry.py,sha256=3FxbFjM1C23tcbAzsYq0Z7Qzdqi3mqhAvumTUl6V7-U,1657
+ddtrace/internal/utils/signals.py,sha256=PZSn4_YNMALZXLhpo333n2LijDcegNYrAQJibXPvnQw,866
+ddtrace/internal/utils/time.py,sha256=lmK2CUxarFj4IMF-18QspZ08iAIb48FoTx-Jjj-xj28,4113
+ddtrace/internal/utils/version.py,sha256=HATPlVAyNyIHXip-0_-8uBhYnomBzYm1Tr0XYsGcLts,2937
+ddtrace/internal/utils/wrappers.py,sha256=9sF0cdQQb7NTZ5fAbiXNgs-Z57mFkK6iv5u4Q04mBOw,632
+ddtrace/internal/uwsgi.py,sha256=ZpAZEcrP9F32dM5uAO2xgXDW8Rzo3iv1j5Ih6BMojco,2770
+ddtrace/internal/wrapping/__init__.py,sha256=24xDiCnr0Atilv8Dj16ORl78O6Jw1X2nZiMFSecISec,9840
+ddtrace/internal/wrapping/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/wrapping/__pycache__/asyncs.cpython-311.pyc,,
+ddtrace/internal/wrapping/__pycache__/generators.cpython-311.pyc,,
+ddtrace/internal/wrapping/asyncs.py,sha256=vD66Tf836TbXCYE2SxxoD7U-fXICgJVYQcAaTVX93f4,19872
+ddtrace/internal/wrapping/generators.py,sha256=q7TrbVGf4Pl3kEqrjCbzmLC1NiL3GGCUeihFBTjvGqE,13579
+ddtrace/internal/writer/__init__.py,sha256=rOoVAxiRLPg-F6K_7AatS8iwWlQr8Fqtxi-AWE2adYY,431
+ddtrace/internal/writer/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/internal/writer/__pycache__/writer.cpython-311.pyc,,
+ddtrace/internal/writer/__pycache__/writer_client.cpython-311.pyc,,
+ddtrace/internal/writer/writer.py,sha256=fdW961Rmt_OoljdMme-o31SK_OP9ArLmaNCNIR4l7C8,24201
+ddtrace/internal/writer/writer_client.py,sha256=yY0T5VbCzRF_xWHVyKxlTuOrmK9oXYPUQSfGiRhYeb0,1198
+ddtrace/opentelemetry/__init__.py,sha256=SHqVDpXH5Od-0tffHvOX41YI5eBc16I043MXgUaWXEw,3455
+ddtrace/opentelemetry/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/opentelemetry/__pycache__/_context.cpython-311.pyc,,
+ddtrace/opentelemetry/__pycache__/_span.cpython-311.pyc,,
+ddtrace/opentelemetry/__pycache__/_trace.cpython-311.pyc,,
+ddtrace/opentelemetry/_context.py,sha256=7ofcRQ8jMlpfEXsOrGTr70_pet_wQLL7XaYR44DLb9Q,3881
+ddtrace/opentelemetry/_span.py,sha256=Iu3WTVLh0DcZao5GOVjiO3TWICo98QBM-mMpjhF-fLE,11663
+ddtrace/opentelemetry/_trace.py,sha256=HEBJ3g-Sv7N5167v0NcHmpx56mqF5xLzUbiHkcIY5JM,6039
+ddtrace/opentracer/__init__.py,sha256=Z2NreXCsKituj_xkrBrxox6dJME-HY5dAQdoPrT6P4A,121
+ddtrace/opentracer/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/opentracer/__pycache__/helpers.cpython-311.pyc,,
+ddtrace/opentracer/__pycache__/settings.cpython-311.pyc,,
+ddtrace/opentracer/__pycache__/span.cpython-311.pyc,,
+ddtrace/opentracer/__pycache__/span_context.cpython-311.pyc,,
+ddtrace/opentracer/__pycache__/tags.cpython-311.pyc,,
+ddtrace/opentracer/__pycache__/tracer.cpython-311.pyc,,
+ddtrace/opentracer/__pycache__/utils.cpython-311.pyc,,
+ddtrace/opentracer/helpers.py,sha256=3Ayxc9IgQPRxgSwEFb2ChKJmfVpn_syv1crODBUYFcg,491
+ddtrace/opentracer/propagation/__init__.py,sha256=dM61Rvq_oyksFQii8bRAUFjm2CZXksIsSf_5fSCjoQw,71
+ddtrace/opentracer/propagation/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/opentracer/propagation/__pycache__/binary.cpython-311.pyc,,
+ddtrace/opentracer/propagation/__pycache__/http.cpython-311.pyc,,
+ddtrace/opentracer/propagation/__pycache__/propagator.cpython-311.pyc,,
+ddtrace/opentracer/propagation/__pycache__/text.cpython-311.pyc,,
+ddtrace/opentracer/propagation/binary.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/opentracer/propagation/http.py,sha256=arRf_w6-nP5gi_bJKWpeC-8G8rXTxalUF1eazBwJveo,2562
+ddtrace/opentracer/propagation/propagator.py,sha256=It4jZZ0bf2yEwngyHLyXofoFd6WxVz1HqNmUTwziAdY,230
+ddtrace/opentracer/propagation/text.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/opentracer/settings.py,sha256=yinHnUraYQvARuRbs-CqpBKwg3pncm6umSdk8vT33VQ,1005
+ddtrace/opentracer/span.py,sha256=5RZDaDrNA6Gp_oCOBNA13cJsjKFVHirjnUVdQ-5UIuI,6494
+ddtrace/opentracer/span_context.py,sha256=RIsesab64GauFgWnolw9E4lDP9I6HTVsI2veg8hqbIw,2262
+ddtrace/opentracer/tags.py,sha256=J7h3lOXHloO69vDaMlERZCUxuEDnb8pz36jSKImPslE,466
+ddtrace/opentracer/tracer.py,sha256=SXwuAE6_4t1dM2bYp5rTYBJupBGh7EkWhq0jPdOajhE,16164
+ddtrace/opentracer/utils.py,sha256=oNiXe2G49awvP4CsYngbzmCpfVycF_u0LSv4RXFa8OQ,2171
+ddtrace/pin.py,sha256=cAXUJEMZdqWXyR9_oRqn7RmswXNv4Z-Oxm-mK3OMsdQ,7267
+ddtrace/profiling/__init__.py,sha256=55iYcZRG6WJMVRH5cF_iocmCVlPa3HyIVayeqimh9-Y,583
+ddtrace/profiling/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/profiling/__pycache__/_asyncio.cpython-311.pyc,,
+ddtrace/profiling/__pycache__/_traceback.cpython-311.pyc,,
+ddtrace/profiling/__pycache__/auto.cpython-311.pyc,,
+ddtrace/profiling/__pycache__/event.cpython-311.pyc,,
+ddtrace/profiling/__pycache__/profiler.cpython-311.pyc,,
+ddtrace/profiling/__pycache__/recorder.cpython-311.pyc,,
+ddtrace/profiling/__pycache__/scheduler.cpython-311.pyc,,
+ddtrace/profiling/_asyncio.py,sha256=uQn3gCMVjTEAuZuzr6VUxz4aAp2pdLAz9olRXtL6m_U,1852
+ddtrace/profiling/_build.c,sha256=-IPUlnGO13bHG4wHBSnixKctNjQHPy8sB3JQd4SfyeY,166988
+ddtrace/profiling/_build.cpython-311-x86_64-linux-gnu.so,sha256=ALhe1kK1EOtef-zXPnx9TkQb_cxdKLR4oKJE610ywtw,28336
+ddtrace/profiling/_build.pyi,sha256=qScjYCL2Bcn8kdVuBq80FWiOFoWnLAnUTER93amencA,58
+ddtrace/profiling/_build.pyx,sha256=3ScNzOvIqQswf00e3gswCDAJ2_vcvXzEAxKYTXiFuM0,146
+ddtrace/profiling/_threading.c,sha256=UEgDY9-nf_9GKN50BrzVp0_UWv6kJWPAx2L8L1eiBZg,477347
+ddtrace/profiling/_threading.cpython-311-x86_64-linux-gnu.so,sha256=WStL_UjyA8AeoFa2_6i9g1creGOC0n9gDNoF2ve4kPQ,120872
+ddtrace/profiling/_threading.pyi,sha256=KiFDR9TLKLjmCuQN6V26nBThaF6nGuyOlzGC8IKa4Vg,379
+ddtrace/profiling/_threading.pyx,sha256=4rW6W_5qQ10ycisDqr3cUXh8vUvkJNHNDPKQn3G1fAw,4884
+ddtrace/profiling/_traceback.py,sha256=gtTV5xW0HKO2azPwCNlx2PdJ0KnM4TcKo7EeJm8cSrU,111
+ddtrace/profiling/auto.py,sha256=9FKbtu11hUCOsN4PrtRkDTVXRGBSIh2REzpaOhBW0tA,294
+ddtrace/profiling/bootstrap/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/profiling/bootstrap/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/profiling/bootstrap/__pycache__/sitecustomize.cpython-311.pyc,,
+ddtrace/profiling/bootstrap/sitecustomize.py,sha256=zSmUmKEOjy4hq1Jiw3yIrhGUQRCl7W3LAHLlLbRbvU0,431
+ddtrace/profiling/collector/__init__.py,sha256=vRaJ6PmEuxDlatggriSI4DoiWR_oRH-oN8YKZ_6s2iU,2091
+ddtrace/profiling/collector/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/profiling/collector/__pycache__/_lock.cpython-311.pyc,,
+ddtrace/profiling/collector/__pycache__/asyncio.cpython-311.pyc,,
+ddtrace/profiling/collector/__pycache__/memalloc.cpython-311.pyc,,
+ddtrace/profiling/collector/__pycache__/stack_event.cpython-311.pyc,,
+ddtrace/profiling/collector/__pycache__/threading.cpython-311.pyc,,
+ddtrace/profiling/collector/_lock.py,sha256=JGnZlXdf6sJrlxImsblKGsNBFkwdtiagsK7CbGRB64Q,7993
+ddtrace/profiling/collector/_memalloc.c,sha256=Ml4NnFRwLG94t38ohmCz6gzoY-JkniR0w2c_0wbCl_E,16129
+ddtrace/profiling/collector/_memalloc.cpython-311-x86_64-linux-gnu.so,sha256=eeYgPdCBc-HqNT-ipxO18TfL3a7muo9CQmobrpuL1ZU,28688
+ddtrace/profiling/collector/_memalloc.pyi,sha256=ENw5uCdruLqV36nO1QabiG-od5pEpelbFQSeOIMdlbA,463
+ddtrace/profiling/collector/_memalloc_heap.c,sha256=RZzQo-eQBrof43OUgV7-BsGbpVXgtgeNGDyCIvPkZxg,7257
+ddtrace/profiling/collector/_memalloc_heap.h,sha256=6u-TDLfnzpWonCYBxNBuP-m3cvYXIcFhciQlnh7sTVw,765
+ddtrace/profiling/collector/_memalloc_reentrant.h,sha256=0aoD1DdLXUSl81Bhp2Su5sIy1xVz8uQOvrj9Mmi1uIQ,1055
+ddtrace/profiling/collector/_memalloc_tb.c,sha256=Dl5TeSDTDxqLtFHlMV-nHzsuCRiSDBmTYnc9UTHD0C8,6574
+ddtrace/profiling/collector/_memalloc_tb.h,sha256=vOjplniALMGIaJa1w7pXyY1Tja-9dfc0UgcXZF5IIyg,1517
+ddtrace/profiling/collector/_pymacro.h,sha256=qpyla0EzoHaikKmLVUG4uRYHf0Qop3r1RuWC_kwKQ-w,344
+ddtrace/profiling/collector/_task.c,sha256=jTkmFu0scXUaptjL2s1ztWsOYmKIbD8_YgSk3j8DcAM,471728
+ddtrace/profiling/collector/_task.cpython-311-x86_64-linux-gnu.so,sha256=MX8pEldhkEUgbBzxW3oG0CA6zBS-S6Ijj9vU-gusPZY,120712
+ddtrace/profiling/collector/_task.pyi,sha256=S3p21udMqXIsTNSVTxdKjoxXR_nDXQK3jnk478lB14U,241
+ddtrace/profiling/collector/_task.pyx,sha256=l-gF6mqMeW1VDkBCTX37m4_jjSpAATvkXBxM9OPlxF0,4655
+ddtrace/profiling/collector/_traceback.c,sha256=I8oYytqC0EncJMwytbNfYJJW1p_d7aoq5F8QUHpgcnE,361265
+ddtrace/profiling/collector/_traceback.cpython-311-x86_64-linux-gnu.so,sha256=oQ77Q3v1v0CVlgAb90hcoPoZvmb6Xsqd2nr5KDoOWIE,87768
+ddtrace/profiling/collector/_traceback.pyi,sha256=pFYKACbj-50H5U2OUNYdiQt-N9rWBE3lKJ4la_7kCN8,302
+ddtrace/profiling/collector/_traceback.pyx,sha256=72dvyn_meGBX09H9DoOpWJkU82Yzq5ylzWnIX8kl1XE,3591
+ddtrace/profiling/collector/_utils.h,sha256=zVoIoSfw9BSin0AO9OnkfyNKG_VGCU5hV4KWlCXatvM,10273
+ddtrace/profiling/collector/asyncio.py,sha256=IRhMR3xsYY8JTThQFOXaRNIFTTwCWznF3bAGObJYDyo,1250
+ddtrace/profiling/collector/memalloc.py,sha256=TUAO6s1BEVW0rJAjs7TEuRlskQSqF87NFnFieSs-6m8,8544
+ddtrace/profiling/collector/stack.c,sha256=96rpcYisDuZWJi6OdNW8ijCRDPixQYlRDNIf1MVhd_8,970390
+ddtrace/profiling/collector/stack.cpython-311-x86_64-linux-gnu.so,sha256=oNCm9K-aMulWmO6xsw4ui2z2JV_fV8A4fdBem5vcDzw,300672
+ddtrace/profiling/collector/stack.pyi,sha256=S1UO1iCbwIrI3V5GYplF1dXsxUDqW7R40T0tw-m37Rk,166
+ddtrace/profiling/collector/stack.pyx,sha256=MuBGNubfA3Mqfx0Lgc7cg_-WVfO3aWoSD7Z5QHl-NDs,20601
+ddtrace/profiling/collector/stack_event.py,sha256=2NzWb6KzjCZLVAhZq_LmJbs1OWSGKraBJXMKVvz1Q8E,547
+ddtrace/profiling/collector/threading.py,sha256=b9TK0j_nBX71GePtrtWSUDGYtXJvQq9Eg5gAPEcHJTo,944
+ddtrace/profiling/event.py,sha256=sRnYKPC15VEDnjcmqFPumMuosVfO-deOv7z1NZ_0jas,2272
+ddtrace/profiling/exporter/__init__.py,sha256=eXAfV1Fa6YIpsJLI23wZP_3CQOlKsj-qFPK6jtZvSmI,950
+ddtrace/profiling/exporter/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/profiling/exporter/__pycache__/file.cpython-311.pyc,,
+ddtrace/profiling/exporter/__pycache__/http.cpython-311.pyc,,
+ddtrace/profiling/exporter/__pycache__/pprof_312_pb2.cpython-311.pyc,,
+ddtrace/profiling/exporter/__pycache__/pprof_319_pb2.cpython-311.pyc,,
+ddtrace/profiling/exporter/__pycache__/pprof_3_pb2.cpython-311.pyc,,
+ddtrace/profiling/exporter/__pycache__/pprof_421_pb2.cpython-311.pyc,,
+ddtrace/profiling/exporter/file.py,sha256=09gHkRknzieU6r4TXMD0eb2oJ93pl0W00cvK068bwvY,1292
+ddtrace/profiling/exporter/http.py,sha256=uUaOuXVJo5YGRRQxh3lrH_gAz3FdlFpbGRRVB-ss66A,9387
+ddtrace/profiling/exporter/pprof.c,sha256=ynWy8C3UZ9Fq0gWSpSSBzZ2sI_4a8wZRB3-aYEVtuGE,1467390
+ddtrace/profiling/exporter/pprof.cpython-311-x86_64-linux-gnu.so,sha256=PbajgOQxi__H25pvJmEKAWUJbQN1ZVLwILT_hCPtcbE,516968
+ddtrace/profiling/exporter/pprof.proto,sha256=iWP1lCz5SeZQVec025rSgjwR6pX8WReAtRv3Eho0lJ4,8841
+ddtrace/profiling/exporter/pprof.pyi,sha256=eEfLtOjXlwXwGOJ1nS75_C4TTFfSbinYL6r9D_iDgW8,3917
+ddtrace/profiling/exporter/pprof.pyx,sha256=tHuHWOdNTPZiluBHTYybuIwyfIGH_s_8TogpEN6C1Ow,28222
+ddtrace/profiling/exporter/pprof_312_pb2.py,sha256=HuWbIP1UWvcAU5EhOYmXt42aW-5UfoD0oyQ-CPongs4,28539
+ddtrace/profiling/exporter/pprof_319_pb2.py,sha256=NDbRjoKkKw25uVofgPBYEvhbdSGKLUA_7zEemvrZXfE,6062
+ddtrace/profiling/exporter/pprof_3_pb2.py,sha256=lj4Su40oadwotRGrDGW8lG774Vs7d-Pu7AsEt_hzSuI,24821
+ddtrace/profiling/exporter/pprof_421_pb2.py,sha256=jgpfHGCEvbfvXF3ChGF5Iw-S6XDQ3yu3_2FB7LVGQCI,3736
+ddtrace/profiling/profiler.py,sha256=KN7irjGLDWjSRfzPmMfK1F5mBilivG7wsSue74r2hF0,14943
+ddtrace/profiling/recorder.py,sha256=XTHoRz1y6hNa25jkY3QZ2dU3nQMQsEwbXJCqhYutUUo,3127
+ddtrace/profiling/scheduler.py,sha256=IcaSJ86hm45ItRJczmY3lBCsl7OsGtqd0m68Epm3ehM,4089
+ddtrace/propagation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/propagation/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/propagation/__pycache__/_database_monitoring.cpython-311.pyc,,
+ddtrace/propagation/__pycache__/_utils.cpython-311.pyc,,
+ddtrace/propagation/__pycache__/http.cpython-311.pyc,,
+ddtrace/propagation/_database_monitoring.py,sha256=OlGXOdBCzyAcipWY3w0UpOsdR5R1FMqxYNNql6uiT4w,3768
+ddtrace/propagation/_utils.py,sha256=EdttZRhZ85wNtZJ5nOKz50JWTW9RNqbKck_rGF3cNts,976
+ddtrace/propagation/http.py,sha256=IYjOXcKdRkcsYc9Uijub9OkF_GVf_2wFRfnFIxYIfn4,40548
+ddtrace/provider.py,sha256=g-sJdm0Sa52I5cuiBZOemFNXxW816f6TscVZmYG0m7Q,5764
+ddtrace/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/runtime/__init__.py,sha256=Aif4-abbGD3jRhKav4seFbLglncJhqtwLMyCK7mWnSw,1795
+ddtrace/runtime/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/sampler.py,sha256=5aVQ4zc54HjOSc0BoHXyAL5kNx9MyM3AutzhqqEo-l4,12314
+ddtrace/sampling_rule.py,sha256=WXoE3fRi10-E1_IN63UxlNL1P_lpbvLFH2ljCQbtEp8,8074
+ddtrace/settings/__init__.py,sha256=K7w4Jo7H8WFNSUlCP_cTwhHHG4C-oO8LlC7yBpYb4x4,319
+ddtrace/settings/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/settings/__pycache__/_database_monitoring.cpython-311.pyc,,
+ddtrace/settings/__pycache__/asm.cpython-311.pyc,,
+ddtrace/settings/__pycache__/config.cpython-311.pyc,,
+ddtrace/settings/__pycache__/dynamic_instrumentation.cpython-311.pyc,,
+ddtrace/settings/__pycache__/exception_debugging.cpython-311.pyc,,
+ddtrace/settings/__pycache__/exceptions.cpython-311.pyc,,
+ddtrace/settings/__pycache__/http.cpython-311.pyc,,
+ddtrace/settings/__pycache__/integration.cpython-311.pyc,,
+ddtrace/settings/__pycache__/peer_service.cpython-311.pyc,,
+ddtrace/settings/__pycache__/profiling.cpython-311.pyc,,
+ddtrace/settings/_database_monitoring.py,sha256=PdUCzgpCEsOmJVv2hjLQ3_lePN_SuTu-Cxbdg-oHKFo,400
+ddtrace/settings/asm.py,sha256=fRgtjQ0PjrwlGdqeb1UVylQnuTyU4y08QWqCkUFala0,3913
+ddtrace/settings/config.py,sha256=2b2phe_FII2UTPFyTw1-3h9Ey4tadaVivN0YNjjdEWo,33175
+ddtrace/settings/dynamic_instrumentation.py,sha256=zQJhp0GRBjV18X2d7reZHV1xqkQobtqFxYx1P_XdwoM,3853
+ddtrace/settings/exception_debugging.py,sha256=pOSY-q8hsIiGwuSphZJOnUPayhQWu0acdRKGFVn0Dnw,330
+ddtrace/settings/exceptions.py,sha256=w0BejP6qaqnGpwCBsn8oJ65gEmTg1ESDmvDrKS_YRDo,163
+ddtrace/settings/http.py,sha256=5zdHBrGd21h048yGjXwzM-KcmBMV46u1YH7b7no-Q9c,2976
+ddtrace/settings/integration.py,sha256=S-OknNhNTnkwth-Vz3MR-p7nvINrBTg65yZKnDgY0iY,5956
+ddtrace/settings/peer_service.py,sha256=vIEu-APjj66OUfurtBIdyYabsHfY6qkgZjoOTOZA2fE,1587
+ddtrace/settings/profiling.py,sha256=2PlfUgC4qxeanCENiNSD7ouC0O8LhyQl4Z6lheKfNfU,6084
+ddtrace/sourcecode/__init__.py,sha256=u-FB5twfJUJiYiPbptsm-QkAmJfKXIqxb-6P20wzNOQ,1055
+ddtrace/sourcecode/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/sourcecode/__pycache__/_utils.cpython-311.pyc,,
+ddtrace/sourcecode/__pycache__/setuptools_auto.cpython-311.pyc,,
+ddtrace/sourcecode/_utils.py,sha256=W3czbBD5ZOm_pkaGJMzZJiVr2KlMqRswMrU50kB9Q-4,1777
+ddtrace/sourcecode/setuptools_auto.py,sha256=5Y-U4pYFylJ5OtKXUpH7wbPUxZsHITVqEamZnrc4c-I,722
+ddtrace/span.py,sha256=6Br4M9muWOHqYzxOGbiVbaYRskId6mbBwouNuGMz0HI,21471
+ddtrace/tracer.py,sha256=usToTIk7INNewSGttvrV2jYYJwcrfAvxapmwPodGz7w,43015
+ddtrace/tracing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/tracing/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/tracing/__pycache__/_span_link.cpython-311.pyc,,
+ddtrace/tracing/__pycache__/trace_handlers.cpython-311.pyc,,
+ddtrace/tracing/_span_link.py,sha256=2XIDqE0d5wW1CQBJTo8mMavb0kFfsAvl5SqiEcvSR_c,3411
+ddtrace/tracing/trace_handlers.py,sha256=ZyFyz98IR3pTQVas7Ah2NqlJ9arBHRYi5bxOK9OLD28,23324
+ddtrace/vendor/__init__.py,sha256=zqXumNo12FW-Pe66cv93BkDDTnSO3Se_ZOCgKguxIKE,2962
+ddtrace/vendor/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/vendor/contextvars/__init__.py,sha256=TRvgsEausfPDhHwEBJ-z2tLSMWRC6E_088v4ODkcnXg,3898
+ddtrace/vendor/contextvars/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/vendor/debtcollector/__init__.py,sha256=Q8OE09M7ZpXatj2Ft47FVL_ClA5KY5upavDWXg1DKtE,2175
+ddtrace/vendor/debtcollector/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/vendor/debtcollector/__pycache__/_utils.cpython-311.pyc,,
+ddtrace/vendor/debtcollector/__pycache__/moves.cpython-311.pyc,,
+ddtrace/vendor/debtcollector/__pycache__/removals.cpython-311.pyc,,
+ddtrace/vendor/debtcollector/__pycache__/renames.cpython-311.pyc,,
+ddtrace/vendor/debtcollector/__pycache__/updating.cpython-311.pyc,,
+ddtrace/vendor/debtcollector/_utils.py,sha256=OXhJruEi9X2H0EaVIcNkByltmf2VKkczf2IAfOB71SE,6346
+ddtrace/vendor/debtcollector/moves.py,sha256=tapV2utvk2OtYcM_joe_l3Yg3XuTtEZkbeVT-tfKcCQ,8421
+ddtrace/vendor/debtcollector/removals.py,sha256=0q5MvpcXKP_W1Jx6Qp4URBwYqYc3q_rsJRgKT3VW-Kw,13890
+ddtrace/vendor/debtcollector/renames.py,sha256=Lok-0KYHlVCCho9vkFsFPFI6VtgUzk0z5NdUyEUH9wA,1715
+ddtrace/vendor/debtcollector/updating.py,sha256=2x54f0_UKhs_SsGHWzt0zpEFq6M0_RzmC7O42ZXaFLo,2426
+ddtrace/vendor/dogstatsd/__init__.py,sha256=IEA4PcepGTcsZQUetyoN10380qLjmtou7rX-CPURmW4,312
+ddtrace/vendor/dogstatsd/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/vendor/dogstatsd/__pycache__/base.cpython-311.pyc,,
+ddtrace/vendor/dogstatsd/__pycache__/compat.cpython-311.pyc,,
+ddtrace/vendor/dogstatsd/__pycache__/container.cpython-311.pyc,,
+ddtrace/vendor/dogstatsd/__pycache__/context.cpython-311.pyc,,
+ddtrace/vendor/dogstatsd/__pycache__/context_async.cpython-311.pyc,,
+ddtrace/vendor/dogstatsd/__pycache__/format.cpython-311.pyc,,
+ddtrace/vendor/dogstatsd/__pycache__/route.cpython-311.pyc,,
+ddtrace/vendor/dogstatsd/base.py,sha256=AwYw0ZKoF-BzGj1LugWxvvUHgg-RK3zujKPyyJ_88FM,46735
+ddtrace/vendor/dogstatsd/compat.py,sha256=BVmqw2TtupHEis35I9Cl_FVcb2wd1mGEiEADVWA-eJk,996
+ddtrace/vendor/dogstatsd/container.py,sha256=0doQtobT4ID8GKDwa-jUjUFr_NTsf0jgc2joaUT0y7o,2052
+ddtrace/vendor/dogstatsd/context.py,sha256=FU8kb8meKfgzSB6igyEM6iByqq2u0Hpr98FBqbt8Wco,2844
+ddtrace/vendor/dogstatsd/context_async.py,sha256=wJgbf9n_pHaN95I0I1RoxycjoK18L0ZBGUVrzcVsW4M,1543
+ddtrace/vendor/dogstatsd/format.py,sha256=maACZlLz8DuSv1sNyhQQQtgswyLPiR98HiHU6cwhxRE,1025
+ddtrace/vendor/dogstatsd/route.py,sha256=VOoCuD5XD9PPtEydVjpbz_FldgGEOd8Yazpt2YoVD-U,1253
+ddtrace/vendor/monotonic/__init__.py,sha256=1wJOetpAPQUteaP7IxAelyChpkITsxZf-eV4V2bTHrA,7117
+ddtrace/vendor/monotonic/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/vendor/packaging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ddtrace/vendor/packaging/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/vendor/packaging/__pycache__/_structures.cpython-311.pyc,,
+ddtrace/vendor/packaging/__pycache__/version.cpython-311.pyc,,
+ddtrace/vendor/packaging/_structures.py,sha256=DCpKtb7u94_oqgVsIJQTrTyZcb3Gz7sSGbk9vYDMME0,1418
+ddtrace/vendor/packaging/version.py,sha256=MKL8nbKLPLGPouIwFvwSVnYRzNpkMo5AIcsa6LGqDF8,12219
+ddtrace/vendor/psutil/__init__.py,sha256=zLnjfXMB3aOXBkgwVbiaTzFYKvDpY3bRBS12BryUMn8,90581
+ddtrace/vendor/psutil/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/vendor/psutil/__pycache__/_common.cpython-311.pyc,,
+ddtrace/vendor/psutil/__pycache__/_compat.cpython-311.pyc,,
+ddtrace/vendor/psutil/__pycache__/_psaix.cpython-311.pyc,,
+ddtrace/vendor/psutil/__pycache__/_psbsd.cpython-311.pyc,,
+ddtrace/vendor/psutil/__pycache__/_pslinux.cpython-311.pyc,,
+ddtrace/vendor/psutil/__pycache__/_psosx.cpython-311.pyc,,
+ddtrace/vendor/psutil/__pycache__/_psposix.cpython-311.pyc,,
+ddtrace/vendor/psutil/__pycache__/_pssunos.cpython-311.pyc,,
+ddtrace/vendor/psutil/__pycache__/_pswindows.cpython-311.pyc,,
+ddtrace/vendor/psutil/__pycache__/setup.cpython-311.pyc,,
+ddtrace/vendor/psutil/_common.py,sha256=eA2kZtQPvlmMpKw_Fz3lswXSg35tthoM6OAb2UhdNNQ,20224
+ddtrace/vendor/psutil/_compat.py,sha256=c9jBW_7ZcDyl562gIeIh53drZ-oUVLPobOnuAJRhY2w,11191
+ddtrace/vendor/psutil/_psaix.py,sha256=IDY57Ybv5k4eSKH50lD7EJ9slfRoPJ-SLKyniXQFvkw,18564
+ddtrace/vendor/psutil/_psbsd.py,sha256=kvDbgjD38KtIZDhsTNbmMowvjSmKs701D-aYr_5wPQE,30566
+ddtrace/vendor/psutil/_pslinux.py,sha256=N-p3vkd-QG9132CIihCIZ47mPa_vwBWkHZf6x3pI6Xg,79839
+ddtrace/vendor/psutil/_psosx.py,sha256=JbNktzY5i5xQJTWNWdWbFSRPBZneb4_34Pm6GKyDiZs,17196
+ddtrace/vendor/psutil/_psposix.py,sha256=sQajYsNSDFV0HqN3GFf7Rvh8vu9eQLbzMpD2eqgakVk,6159
+ddtrace/vendor/psutil/_pssunos.py,sha256=ZayYw299DPsmA8TzA7UpuFuigW49OC0KrdrU4A1hOlY,25109
+ddtrace/vendor/psutil/_psutil_aix.c,sha256=fEoiu23Up7TxlsiUfeSWSubKRAO8te5we63MBZ6vOWI,31034
+ddtrace/vendor/psutil/_psutil_bsd.c,sha256=neGG1AtSb2f2TOSQ8w99XDn51yvjWjOZENvs5HsDv40,34969
+ddtrace/vendor/psutil/_psutil_common.c,sha256=HecdlMru0pRZAhuORp9xDelN4RaVK4muio4qS0Hfx-M,3136
+ddtrace/vendor/psutil/_psutil_common.h,sha256=mJvKu0yDQYOTpzbfkXnLOk3mFPBxAF56qsyyck29e9o,870
+ddtrace/vendor/psutil/_psutil_linux.c,sha256=PvXPmGMPnxWkqtnHwkOAZhIdfkISpPyklpGQc0la-kI,18930
+ddtrace/vendor/psutil/_psutil_linux.cpython-311-x86_64-linux-gnu.so,sha256=IyZF2O26ibj5PviSho1j16ljMmCgLvMkWyNsJFPO0v8,29376
+ddtrace/vendor/psutil/_psutil_osx.c,sha256=2HZi7AKgRu7lDVu6WYiJZ0Yc0c5Z0zffOzsbeuw1z6o,59036
+ddtrace/vendor/psutil/_psutil_posix.c,sha256=sHiCGhhXyF7t7AYpHvjsDXjVvc3NY3poNZpvNBa3CDo,18620
+ddtrace/vendor/psutil/_psutil_posix.cpython-311-x86_64-linux-gnu.so,sha256=E5JUP3XQTr8MqqYT125bNBaD5KnOZrZycT_DDGNWxxE,22696
+ddtrace/vendor/psutil/_psutil_posix.h,sha256=uWkIEoeQUPIFOuiMzvMo5XzPYkM6JtXsFVPNzyntlu0,256
+ddtrace/vendor/psutil/_psutil_sunos.c,sha256=-LKjVI0TP_n676lCoA-Wrh3u0GvplkrH1Oqc49uuO84,51184
+ddtrace/vendor/psutil/_psutil_windows.c,sha256=ACXdZpmW6Z8onahjDuHRYaBwnX9f0ddgxTsGLP7RPms,114213
+ddtrace/vendor/psutil/_pswindows.py,sha256=A2mlRzUvPxPrj8LETN25MAn6fRcLp12L-RP7bwP2TKM,37400
+ddtrace/vendor/psutil/arch/aix/common.c,sha256=zwuWitDFSTJzYjZVWWqGhrpyhueVxK5Yf0YU1KtCRzc,2284
+ddtrace/vendor/psutil/arch/aix/common.h,sha256=eLKtDGdfcV1kC_h2Wl4dc0kg2wr-qKs5WKIAoKiUx_o,894
+ddtrace/vendor/psutil/arch/aix/ifaddrs.c,sha256=RLyTuG2zstr53zze2KaS2qy0cnvnZ7GaB8ZGI7QD7vA,3839
+ddtrace/vendor/psutil/arch/aix/ifaddrs.h,sha256=Bq9yYVe8ggnStKukX9g7yhSv9teB9aLf3Ms_YSzwNaA,767
+ddtrace/vendor/psutil/arch/aix/net_connections.c,sha256=dw2HRw8Z9XCNeiWmcXk1mj_AukSrG2P_7zGwqNtfj20,8134
+ddtrace/vendor/psutil/arch/aix/net_connections.h,sha256=FWI-ALW2lInyvex3FPEB4Gnb_veTCtaxZah0g0jwyh4,355
+ddtrace/vendor/psutil/arch/aix/net_kernel_structs.h,sha256=3qhQr5WOzoQNjfCTatC6f1m0uJvYQKhvpA3kbW_O5N4,4059
+ddtrace/vendor/psutil/arch/freebsd/proc_socks.c,sha256=UcKuLJ3V5crqeB6z7KfQYWiNBzFaIBODDESMZ0iZwrI,10925
+ddtrace/vendor/psutil/arch/freebsd/proc_socks.h,sha256=8zCu73GZa_RUeX7DZVy--EDPAE2yzvMmW0L7KqrM1fk,263
+ddtrace/vendor/psutil/arch/freebsd/specific.c,sha256=l_QUghmeq9EFP-iamfFF1dRbkf8F-JM3lNtyXgV4ZSk,31802
+ddtrace/vendor/psutil/arch/freebsd/specific.h,sha256=2w0cLpvd91ZhLaFoRF436ag19e7JMqEtrN3QK62jlbY,1563
+ddtrace/vendor/psutil/arch/freebsd/sys_socks.c,sha256=PGWKfERuBDcIX1xEFT6TMotaFfRENknsUjNnpO6zsi0,9986
+ddtrace/vendor/psutil/arch/freebsd/sys_socks.h,sha256=_VQx5Cch72yHa5a73s7-6XVbY15kO_DOZ6vJb6Wj7Yg,265
+ddtrace/vendor/psutil/arch/netbsd/socks.c,sha256=NI4eN62mpbqgbxm2uJnMdUrqSPf9cL8k53wGFxNvYzk,12227
+ddtrace/vendor/psutil/arch/netbsd/socks.h,sha256=N8aNAVjqpoywUwhlRSXxe0dxQMOaBL2JU-y7vQuOmNs,331
+ddtrace/vendor/psutil/arch/netbsd/specific.c,sha256=cwyxa9_JWAf5m3x3Wvl-JA63F8HbXeu8AEvIiLImkeI,17351
+ddtrace/vendor/psutil/arch/netbsd/specific.h,sha256=DiMug_IhzJMkSwJKRcLHWrJJlc3XhPLLFFvfdDyT_qI,1220
+ddtrace/vendor/psutil/arch/openbsd/specific.c,sha256=vPG1fdC8EePuBbg5BodoOB1CHUOKKlmJsYx1_QjNwJ0,22349
+ddtrace/vendor/psutil/arch/openbsd/specific.h,sha256=c0_IddDQN7zplGtruCD_4-oPfgjQZxS7S9o2KG6DG5k,1086
+ddtrace/vendor/psutil/arch/osx/process_info.c,sha256=sLR5Vb4TYvdcoER5RxtVlTns-9FuhnU2GPYAT_gl4_c,9875
+ddtrace/vendor/psutil/arch/osx/process_info.h,sha256=dPBFqGTCs34kI8Mw1w-Ot3QhS6PrnDVkGKAkRM5C-lg,602
+ddtrace/vendor/psutil/arch/solaris/environ.c,sha256=UDkpvJMkAjPPUAYhcg7OOmuW_hxbz7W2HOa-a9vbwOU,10188
+ddtrace/vendor/psutil/arch/solaris/environ.h,sha256=aQIDTp6uPYLPCyd7qPK1go_BQAthD7IyG2kH2zwjcZM,511
+ddtrace/vendor/psutil/arch/solaris/v10/ifaddrs.c,sha256=73ckKxa0IQjH38Wawr-G5_Tb922goQQZ4eXqDbQKsZg,3253
+ddtrace/vendor/psutil/arch/solaris/v10/ifaddrs.h,sha256=VpaMZuVkenSRVoCBbqaAaaZnC9S6JI-VFiShSv747Go,567
+ddtrace/vendor/psutil/arch/windows/global.c,sha256=FgYmyGBKIP0HrngAra05m_kkbWGiscb2FQ4PQ7wwTMY,6654
+ddtrace/vendor/psutil/arch/windows/global.h,sha256=ROqv8VNajiYBUJ2ltJrsX9Ic57590BB8hyDZqMVV4Hc,1841
+ddtrace/vendor/psutil/arch/windows/inet_ntop.c,sha256=0Iqg1WKrC5mZ84xBVuqeBKmFm4U2iZCBr3jalGNct5s,1407
+ddtrace/vendor/psutil/arch/windows/inet_ntop.h,sha256=REJAYcwsuTpXo5KDh7Fd37EiTM882S0xL9LPjsqC9v8,575
+ddtrace/vendor/psutil/arch/windows/ntextapi.h,sha256=SF141Jz80K5EWZgKilLMtt3UcsW7cet3P8xzoW5Sd1k,13760
+ddtrace/vendor/psutil/arch/windows/process_handles.c,sha256=OYyUdPT2Dh924Mj4MPWsDIg3hDz-Gp4NJv7gH8OWpcc,14681
+ddtrace/vendor/psutil/arch/windows/process_handles.h,sha256=3XlBjAsF8Fw77dnaBiMUR6rpkIhF30h0yb_-vogZ9Jc,293
+ddtrace/vendor/psutil/arch/windows/process_info.c,sha256=Aursf0zYxWuktaFmtUjOqrnxXgCxW-X13chE28bhA0Y,29326
+ddtrace/vendor/psutil/arch/windows/process_info.h,sha256=29okjO4nZXHhag1M8ZEy2tYUdSZNdV9RoVFNh4zmo_I,1000
+ddtrace/vendor/psutil/arch/windows/security.c,sha256=nwkIAw66q-D9TrMrrmmp6529w3PE5YeGZ2IgyzXKOoc,3672
+ddtrace/vendor/psutil/arch/windows/security.h,sha256=OEwyZUlqk6Ipsmdg6ho7E1XULgJlciOteIr4Kxf-9nc,365
+ddtrace/vendor/psutil/arch/windows/services.c,sha256=rW4LUZZiFfLdNI6I5lgEAobbJesEvrSJBrYeWEs41q8,13168
+ddtrace/vendor/psutil/arch/windows/services.h,sha256=4Wo7Tn_AgeXd6UQ68uGphuFxxxeXc0u734skNmzHG1s,730
+ddtrace/vendor/psutil/arch/windows/wmi.c,sha256=TWJJmrLfNLZ-8Lj0SBMPMe2J5gUNmF3TF-O3uNsFLEI,3408
+ddtrace/vendor/psutil/arch/windows/wmi.h,sha256=E-fBKzcyeLaj3e2b_26jxtchpm0X7YVvKkHp-ix3BPA,282
+ddtrace/vendor/psutil/setup.py,sha256=Ze5fZ_gj3Y6e4i3Wb446CgVdiICXpfLYkZVvlllusqo,7931
+ddtrace/vendor/sqlcommenter/__init__.py,sha256=w33CHxf0qVmKrWygWIRc_bw_vK6ukSXvEli5p22XQL0,1723
+ddtrace/vendor/sqlcommenter/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/vendor/wrapt/LICENSE,sha256=-ORXVVFm6-jW4-VBEREdObh8EdASF-dmJ7gpCSKYLUQ,1303
+ddtrace/vendor/wrapt/__init__.py,sha256=Bh0h33Iapc_qaoLWsWfaXK5xJz9KJExF7gQKIWYdSsg,1200
+ddtrace/vendor/wrapt/__pycache__/__init__.cpython-311.pyc,,
+ddtrace/vendor/wrapt/__pycache__/arguments.cpython-311.pyc,,
+ddtrace/vendor/wrapt/__pycache__/decorators.cpython-311.pyc,,
+ddtrace/vendor/wrapt/__pycache__/importer.cpython-311.pyc,,
+ddtrace/vendor/wrapt/__pycache__/setup.cpython-311.pyc,,
+ddtrace/vendor/wrapt/__pycache__/wrappers.cpython-311.pyc,,
+ddtrace/vendor/wrapt/_wrappers.c,sha256=X3MBBnD_-XcLbVoRW73-9vQ5Qbl5J5SCUH-unjzjBRk,95815
+ddtrace/vendor/wrapt/_wrappers.cpython-311-x86_64-linux-gnu.so,sha256=f9xbe8ViWi0KK0yz3fZjQOsS4tqXCZ1P8Jg6BxSa9mQ,65368
+ddtrace/vendor/wrapt/arguments.py,sha256=RF0nTEdPzPIewJ-jnSY42i4JSzK3ctjPABV1SJxLymg,1746
+ddtrace/vendor/wrapt/decorators.py,sha256=gNy1PVq9NNVDAB9tujaAVhb0xtVKSSzqT-hdGFeWM34,21332
+ddtrace/vendor/wrapt/importer.py,sha256=yxFgVg6-lRTbSVJ2oZbw1TPCtB98fIF4A_qi_Dh2JRc,9981
+ddtrace/vendor/wrapt/setup.py,sha256=CF2p_6VhgEGASbK2JH_MARGMt3GHe6uADKRqu573QY0,191
+ddtrace/vendor/wrapt/wrappers.py,sha256=ofd5HIVcZ8-YCcMH1SCeUcxsueYhRLcBDByrP_366yQ,35222
+ddtrace/version.py,sha256=uvdArJQL_C2QEfxXrcI1hs28bkcgLWfgwzpLUj50HI0,527
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/REQUESTED b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/REQUESTED
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/WHEEL b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/WHEEL
new file mode 100644
index 0000000..4497ba5
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.42.0)
+Root-Is-Purelib: false
+Tag: cp311-cp311-manylinux_2_17_x86_64
+Tag: cp311-cp311-manylinux2014_x86_64
+
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/entry_points.txt b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/entry_points.txt
new file mode 100644
index 0000000..a2ad0bb
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/entry_points.txt
@@ -0,0 +1,10 @@
+[console_scripts]
+ddtrace-run = ddtrace.commands.ddtrace_run:main
+
+[opentelemetry_context]
+ddcontextvars_context = ddtrace.opentelemetry._context:DDRuntimeContext
+
+[pytest11]
+ddtrace = ddtrace.contrib.pytest.plugin
+ddtrace.pytest_bdd = ddtrace.contrib.pytest_bdd.plugin
+ddtrace.pytest_benchmark = ddtrace.contrib.pytest_benchmark.plugin
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/top_level.txt b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/top_level.txt
new file mode 100644
index 0000000..749bf29
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace-2.6.5.dist-info/top_level.txt
@@ -0,0 +1 @@
+ddtrace
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/.DS_Store b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/.DS_Store
new file mode 100644
index 0000000..78912b3
Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/.DS_Store differ
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/__init__.py
new file mode 100644
index 0000000..cab7b7a
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/__init__.py
@@ -0,0 +1,58 @@
+import sys
+
+LOADED_MODULES = frozenset(sys.modules.keys())
+
+from ddtrace.internal.module import ModuleWatchdog
+
+ModuleWatchdog.install()
+
+# Acquire a reference to the threading module. Some parts of the library (e.g.
+# the profiler) might be enabled programmatically and therefore might end up
+# getting a reference to the tracee's threading module. By storing a reference
+# to the threading module used by ddtrace here, we make it easy for those parts
+# to get a reference to the right threading module.
+import threading as _threading
+
+from ._logger import configure_ddtrace_logger
+
+
+# configure ddtrace logger before other modules log
+configure_ddtrace_logger() # noqa: E402
+
+from .settings import _config as config
+
+if config._telemetry_enabled:
+ from ddtrace.internal import telemetry
+
+ telemetry.install_excepthook()
+ # In order to support 3.12, we start the writer upon initialization.
+ # See https://github.com/python/cpython/pull/104826.
+ # Telemetry events will only be sent after the `app-started` is queued.
+ # This will occur when the agent writer starts.
+ telemetry.telemetry_writer.enable()
+
+from ._monkey import patch # noqa: E402
+from ._monkey import patch_all # noqa: E402
+from .internal.utils.deprecations import DDTraceDeprecationWarning # noqa: E402
+from .pin import Pin # noqa: E402
+from .settings import _config as config # noqa: E402
+from .span import Span # noqa: E402
+from .tracer import Tracer # noqa: E402
+from .version import get_version # noqa: E402
+
+
+__version__ = get_version()
+
+# a global tracer instance with integration settings
+tracer = Tracer()
+
+__all__ = [
+ "patch",
+ "patch_all",
+ "Pin",
+ "Span",
+ "tracer",
+ "Tracer",
+ "config",
+ "DDTraceDeprecationWarning",
+]
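+
+# Bootstrap sketch (editor's illustration, not upstream code): typical
+# programmatic use after import, mirroring what ddtrace-run automates.
+# >>> import ddtrace
+# >>> ddtrace.patch_all(logging=True)
+# >>> with ddtrace.tracer.trace("web.request") as span:
+# ...     span.set_tag("component", "example")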
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_hooks.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_hooks.py
new file mode 100644
index 0000000..07bc149
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_hooks.py
@@ -0,0 +1,137 @@
+import collections
+from copy import deepcopy
+from typing import Any # noqa:F401
+from typing import Callable # noqa:F401
+from typing import Optional # noqa:F401
+from typing import Set # noqa:F401
+
+import attr
+
+from .internal.logger import get_logger
+
+
+try:
+ from typing import DefaultDict # noqa:F401
+except ImportError:
+ from collections import defaultdict as DefaultDict
+
+log = get_logger(__name__)
+
+
+@attr.s(slots=True)
+class Hooks(object):
+ """
+ Hooks configuration object is used for registering and calling hook functions
+
+ Example::
+
+ @config.falcon.hooks.on('request')
+ def on_request(span, request, response):
+ pass
+ """
+
+ _hooks = attr.ib(init=False, factory=lambda: collections.defaultdict(set), type=DefaultDict[str, Set])
+
+ def __deepcopy__(self, memodict=None):
+ hooks = Hooks()
+ hooks._hooks = deepcopy(self._hooks, memodict)
+ return hooks
+
+ def register(
+ self,
+ hook, # type: Any
+ func=None, # type: Optional[Callable]
+ ):
+ # type: (...) -> Optional[Callable[..., Any]]
+ """
+ Function used to register a hook for the provided name.
+
+ Example::
+
+ def on_request(span, request, response):
+ pass
+
+ config.falcon.hooks.register('request', on_request)
+
+
+ If no function is provided then a decorator is returned::
+
+ @config.falcon.hooks.register('request')
+ def on_request(span, request, response):
+ pass
+
+ :param hook: The name of the hook to register the function for
+ :type hook: object
+ :param func: The function to register, or ``None`` if a decorator should be returned
+ :type func: function, None
+ :returns: Either a function decorator if ``func is None``, otherwise ``None``
+ :rtype: function, None
+ """
+ # If they didn't provide a function, then return a decorator
+ if not func:
+
+ def wrapper(func):
+ self.register(hook, func)
+ return func
+
+ return wrapper
+ self._hooks[hook].add(func)
+ return None
+
+ # Provide shorthand `on` method for `register`
+ # >>> @config.falcon.hooks.on('request')
+ # def on_request(span, request, response):
+ # pass
+ on = register
+
+ def deregister(
+ self,
+ hook, # type: Any
+ func, # type: Callable
+ ):
+ # type: (...) -> None
+ """
+ Function to deregister a function from a hook it was registered under
+
+ Example::
+
+ @config.falcon.hooks.on('request')
+ def on_request(span, request, response):
+ pass
+
+ config.falcon.hooks.deregister('request', on_request)
+
+ :param hook: The name of the hook to register the function for
+ :type hook: object
+ :param func: Function hook to register
+ :type func: function
+ """
+ if hook in self._hooks:
+ try:
+ self._hooks[hook].remove(func)
+ except KeyError:
+ pass
+
+ def emit(
+ self,
+ hook, # type: Any
+ *args, # type: Any
+ **kwargs, # type: Any
+ ):
+ # type: (...) -> None
+ """
+ Function used to call registered hook functions.
+
+ :param hook: The hook to call functions for
+ :type hook: str
+ :param args: Positional arguments to pass to the hook functions
+ :type args: list
+ :param kwargs: Keyword arguments to pass to the hook functions
+ :type kwargs: dict
+ """
+ # Call registered hooks
+ for func in self._hooks.get(hook, ()):
+ try:
+ func(*args, **kwargs)
+ except Exception:
+ log.error("Failed to run hook %s function %s", hook, func, exc_info=True)
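+
+
+# Usage sketch (editor's illustration, not upstream code): how register/emit/
+# deregister compose. The `hooks` instance here is hypothetical; integration
+# configs expose one as e.g. config.falcon.hooks.
+# >>> hooks = Hooks()
+# >>> @hooks.on("request")
+# ... def on_request(span, request, response):
+# ...     pass
+# >>> hooks.emit("request", None, None, None)   # calls on_request
+# >>> hooks.deregister("request", on_request)   # emit() becomes a no-op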
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_logger.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_logger.py
new file mode 100644
index 0000000..f7989f8
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_logger.py
@@ -0,0 +1,88 @@
+import logging
+from logging.handlers import RotatingFileHandler
+import os
+
+from ddtrace.internal.utils.formats import asbool
+
+
+DD_LOG_FORMAT = "%(asctime)s %(levelname)s [%(name)s] [%(filename)s:%(lineno)d] {}- %(message)s".format(
+ "[dd.service=%(dd.service)s dd.env=%(dd.env)s dd.version=%(dd.version)s"
+ " dd.trace_id=%(dd.trace_id)s dd.span_id=%(dd.span_id)s] "
+)
+
+DEFAULT_FILE_SIZE_BYTES = 15 << 20  # 15 MiB
+
+
+def configure_ddtrace_logger():
+ # type: () -> None
+ """Configures ddtrace log levels and file paths.
+
+ Customization is possible with the environment variables:
+ ``DD_TRACE_DEBUG``, ``DD_TRACE_LOG_FILE_LEVEL``, and ``DD_TRACE_LOG_FILE``
+
+ By default, when none of the settings have been changed, ddtrace loggers
+ inherit from the root logger in the logging module and no logs are written to a file.
+
+ When DD_TRACE_DEBUG has been enabled:
+ - Logs are propagated up so that they appear in the application logs if a file path wasn't provided
+ - Logs are routed to a file when DD_TRACE_LOG_FILE is specified, using the log level in DD_TRACE_LOG_FILE_LEVEL.
+ - Child loggers inherit from the parent ddtrace logger
+
+ Note(s):
+ 1) The ddtrace-run logs under commands/ddtrace_run do not follow DD_TRACE_LOG_FILE if DD_TRACE_DEBUG is enabled.
+ This is because ddtrace-run calls ``logging.basicConfig()`` when DD_TRACE_DEBUG is enabled, so
+ this configuration is not applied.
+ 2) Python 2: If the application is using DD_TRACE_DEBUG=true, logging will need to be configured,
+        i.e. with ``logging.basicConfig()``.
+
+ """
+ ddtrace_logger = logging.getLogger("ddtrace")
+ if asbool(os.environ.get("DD_TRACE_LOG_STREAM_HANDLER", "true")):
+ ddtrace_logger.addHandler(logging.StreamHandler())
+
+ _configure_ddtrace_debug_logger(ddtrace_logger)
+ _configure_ddtrace_file_logger(ddtrace_logger)
+
+
+def _configure_ddtrace_debug_logger(logger):
+ if asbool(os.environ.get("DD_TRACE_DEBUG", "false")):
+ logger.setLevel(logging.DEBUG)
+ logger.debug("debug mode has been enabled for the ddtrace logger")
+
+
+def _configure_ddtrace_file_logger(logger):
+ log_file_level = os.environ.get("DD_TRACE_LOG_FILE_LEVEL", "DEBUG").upper()
+ try:
+ file_log_level_value = getattr(logging, log_file_level)
+ except AttributeError:
+ raise ValueError(
+ "DD_TRACE_LOG_FILE_LEVEL is invalid. Log level must be CRITICAL/ERROR/WARNING/INFO/DEBUG.",
+ log_file_level,
+ )
+
+ log_path = os.environ.get("DD_TRACE_LOG_FILE")
+ if log_path is not None:
+ log_path = os.path.abspath(log_path)
+ max_file_bytes = int(os.environ.get("DD_TRACE_LOG_FILE_SIZE_BYTES", DEFAULT_FILE_SIZE_BYTES))
+ num_backup = 1
+ ddtrace_file_handler = RotatingFileHandler(
+ filename=log_path, mode="a", maxBytes=max_file_bytes, backupCount=num_backup
+ )
+ log_format = "%(asctime)s %(levelname)s [%(name)s] [%(filename)s:%(lineno)d] - %(message)s"
+ log_formatter = logging.Formatter(log_format)
+ ddtrace_file_handler.setLevel(file_log_level_value)
+ ddtrace_file_handler.setFormatter(log_formatter)
+ logger.addHandler(ddtrace_file_handler)
+ logger.debug("ddtrace logs will be routed to %s", log_path)
+
+
+def _configure_log_injection():
+ """
+ Ensures that logging is patched before we inject trace information into logs.
+ """
+ from ddtrace import patch
+
+ patch(logging=True)
+ ddtrace_logger = logging.getLogger("ddtrace")
+ for handler in ddtrace_logger.handlers:
+ handler.setFormatter(logging.Formatter(DD_LOG_FORMAT))
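+
+
+# Configuration sketch (editor's illustration), using the env vars documented
+# above to route debug logs to a rotating file:
+# >>> import os
+# >>> os.environ["DD_TRACE_DEBUG"] = "true"
+# >>> os.environ["DD_TRACE_LOG_FILE"] = "/tmp/ddtrace.log"
+# >>> os.environ["DD_TRACE_LOG_FILE_SIZE_BYTES"] = str(10 << 20)
+# >>> configure_ddtrace_logger()  # normally invoked from ddtrace/__init__.py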
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_monkey.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_monkey.py
new file mode 100644
index 0000000..6090e9e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_monkey.py
@@ -0,0 +1,273 @@
+import importlib
+import os
+import threading
+from typing import TYPE_CHECKING # noqa:F401
+
+from ddtrace.vendor.wrapt.importer import when_imported
+
+from .internal.logger import get_logger
+from .internal.utils import formats
+from .settings import _config as config
+from .settings.asm import config as asm_config
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from typing import Any # noqa:F401
+ from typing import Callable # noqa:F401
+ from typing import List # noqa:F401
+ from typing import Union # noqa:F401
+
+
+log = get_logger(__name__)
+
+# Default set of modules to automatically patch or not
+PATCH_MODULES = {
+ "aioredis": True,
+ "aiomysql": True,
+ "aredis": True,
+ "asyncio": True,
+ "boto": True,
+ "botocore": True,
+ "bottle": True,
+ "cassandra": True,
+ "celery": True,
+ "consul": True,
+ "django": True,
+ "elasticsearch": True,
+ "algoliasearch": True,
+ "futures": True,
+ "gevent": True,
+ "graphql": True,
+ "grpc": True,
+ "httpx": True,
+ "kafka": True,
+ "mongoengine": True,
+ "mysql": True,
+ "mysqldb": True,
+ "pymysql": True,
+ "mariadb": True,
+ "psycopg": True,
+ "pylibmc": True,
+ "pymemcache": True,
+ "pymongo": True,
+ "redis": True,
+ "rediscluster": True,
+ "requests": True,
+ "rq": True,
+ "sanic": True,
+ "snowflake": False,
+ "sqlalchemy": False, # Prefer DB client instrumentation
+ "sqlite3": True,
+ "aiohttp": True, # requires asyncio (Python 3.4+)
+ "aiohttp_jinja2": True,
+ "aiopg": True,
+ "aiobotocore": False,
+ "httplib": False,
+ "urllib3": False,
+ "vertica": True,
+ "molten": True,
+ "jinja2": True,
+ "mako": True,
+ "flask": True,
+ "flask_login": True,
+ "kombu": False,
+ "starlette": True,
+ # Ignore some web framework integrations that might be configured explicitly in code
+ "falcon": True,
+ "pyramid": True,
+ # Auto-enable logging if the environment variable DD_LOGS_INJECTION is true
+ "logbook": config.logs_injection,
+ "logging": config.logs_injection,
+ "loguru": config.logs_injection,
+ "structlog": config.logs_injection,
+ "pynamodb": True,
+ "pyodbc": True,
+ "fastapi": True,
+ "dogpile_cache": True,
+ "yaaredis": True,
+ "asyncpg": True,
+ "aws_lambda": True, # patch only in AWS Lambda environments
+ "tornado": False,
+ "openai": True,
+ "langchain": True,
+ "subprocess": True,
+ "unittest": True,
+ "coverage": False,
+}
+
+
+# this information would make more sense living in the contrib modules, but
+# retrieving it from there would require importing those modules, which we
+# need to avoid for as long as possible.
+CONTRIB_DEPENDENCIES = {
+ "tornado": ("futures",),
+}
+
+
+_LOCK = threading.Lock()
+_PATCHED_MODULES = set()
+
+# Module names that need to be patched for a given integration. If the module
+# name coincides with the integration name, then there is no need to add an
+# entry here.
+_MODULES_FOR_CONTRIB = {
+ "elasticsearch": (
+ "elasticsearch",
+ "elasticsearch1",
+ "elasticsearch2",
+ "elasticsearch5",
+ "elasticsearch6",
+ "elasticsearch7",
+        # Starting with version 8, the default transport, which is what we
+        # actually patch, is found in the separate elastic_transport package
+ "elastic_transport",
+ "opensearchpy",
+ ),
+ "psycopg": (
+ "psycopg",
+ "psycopg2",
+ ),
+ "snowflake": ("snowflake.connector",),
+ "cassandra": ("cassandra.cluster",),
+ "dogpile_cache": ("dogpile.cache",),
+ "mysqldb": ("MySQLdb",),
+ "futures": ("concurrent.futures.thread",),
+ "vertica": ("vertica_python",),
+ "aws_lambda": ("datadog_lambda",),
+ "httplib": ("http.client",),
+ "kafka": ("confluent_kafka",),
+}
+
+
+DEFAULT_MODULES_PREFIX = "ddtrace.contrib"
+
+
+class PatchException(Exception):
+ """Wraps regular `Exception` class when patching modules"""
+
+ pass
+
+
+class ModuleNotFoundException(PatchException):
+ pass
+
+
+def _on_import_factory(module, prefix="ddtrace.contrib", raise_errors=True, patch_indicator=True):
+ # type: (str, str, bool, Union[bool, List[str]]) -> Callable[[Any], None]
+ """Factory to create an import hook for the provided module name"""
+
+ def on_import(hook):
+ if config._telemetry_enabled:
+ from .internal import telemetry
+ # Import and patch module
+ path = "%s.%s" % (prefix, module)
+ try:
+ imported_module = importlib.import_module(path)
+ except Exception as e:
+ if raise_errors:
+ raise
+ error_msg = "failed to import ddtrace module %r when patching on import" % (path,)
+ log.error(error_msg, exc_info=True)
+ if config._telemetry_enabled:
+ telemetry.telemetry_writer.add_integration(module, False, PATCH_MODULES.get(module) is True, error_msg)
+ telemetry.telemetry_writer.add_count_metric(
+ "tracers", "integration_errors", 1, (("integration_name", module), ("error_type", type(e).__name__))
+ )
+ else:
+ imported_module.patch()
+ if config._telemetry_enabled:
+ if hasattr(imported_module, "get_versions"):
+ versions = imported_module.get_versions()
+ for name, v in versions.items():
+ telemetry.telemetry_writer.add_integration(
+ name, True, PATCH_MODULES.get(module) is True, "", version=v
+ )
+ else:
+ version = imported_module.get_version()
+ telemetry.telemetry_writer.add_integration(
+ module, True, PATCH_MODULES.get(module) is True, "", version=version
+ )
+
+ if hasattr(imported_module, "patch_submodules"):
+ imported_module.patch_submodules(patch_indicator)
+
+ return on_import
+
+
+def patch_all(**patch_modules):
+ # type: (bool) -> None
+ """Automatically patches all available modules.
+
+    In addition to ``patch_modules``, an override can be specified via an
+    environment variable, ``DD_TRACE_<module>_ENABLED``, for each module.
+
+    ``patch_modules`` has the highest precedence for overriding.
+
+ :param dict patch_modules: Override whether particular modules are patched or not.
+
+ >>> patch_all(redis=False, cassandra=False)
+ """
+ modules = PATCH_MODULES.copy()
+
+ # The enabled setting can be overridden by environment variables
+ for module, _enabled in modules.items():
+ env_var = "DD_TRACE_%s_ENABLED" % module.upper()
+ if env_var in os.environ:
+ modules[module] = formats.asbool(os.environ[env_var])
+
+ # Enable all dependencies for the module
+ if modules[module]:
+ for dep in CONTRIB_DEPENDENCIES.get(module, ()):
+ modules[dep] = True
+
+ # Arguments take precedence over the environment and the defaults.
+ modules.update(patch_modules)
+
+ patch(raise_errors=False, **modules)
+ if asm_config._iast_enabled:
+ from ddtrace.appsec._iast._patch_modules import patch_iast
+
+ patch_iast()
+
+
+def patch(raise_errors=True, patch_modules_prefix=DEFAULT_MODULES_PREFIX, **patch_modules):
+ # type: (bool, str, Union[List[str], bool]) -> None
+ """Patch only a set of given modules.
+
+    :param bool raise_errors: Raise an error if a patch fails.
+    :param dict patch_modules: Modules to patch, mapped to a patch indicator.
+
+ >>> patch(psycopg=True, elasticsearch=True)
+ """
+ contribs = {c: patch_indicator for c, patch_indicator in patch_modules.items() if patch_indicator}
+ for contrib, patch_indicator in contribs.items():
+ # Check if we have the requested contrib.
+ if not os.path.isfile(os.path.join(os.path.dirname(__file__), "contrib", contrib, "__init__.py")):
+ if raise_errors:
+ raise ModuleNotFoundException(
+ "integration module ddtrace.contrib.%s does not exist, "
+ "module will not have tracing available" % contrib
+ )
+ modules_to_patch = _MODULES_FOR_CONTRIB.get(contrib, (contrib,))
+ for module in modules_to_patch:
+ # Use factory to create handler to close over `module` and `raise_errors` values from this loop
+ when_imported(module)(
+ _on_import_factory(contrib, raise_errors=raise_errors, patch_indicator=patch_indicator)
+ )
+
+ # manually add module to patched modules
+ with _LOCK:
+ _PATCHED_MODULES.add(contrib)
+
+ log.info(
+ "Configured ddtrace instrumentation for %s integration(s). The following modules have been patched: %s",
+ len(contribs),
+ ",".join(contribs),
+ )
+
+
+def _get_patched_modules():
+ # type: () -> List[str]
+ """Get the list of patched modules"""
+ with _LOCK:
+ return sorted(_PATCHED_MODULES)
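+
+
+# Precedence sketch (editor's illustration): defaults in PATCH_MODULES are
+# overridden by DD_TRACE_<MODULE>_ENABLED env vars, which are in turn
+# overridden by keyword arguments.
+# >>> import os
+# >>> os.environ["DD_TRACE_REDIS_ENABLED"] = "false"  # env beats the default
+# >>> patch_all(redis=True)                           # kwarg beats the env
+# >>> "redis" in _get_patched_modules()
+# True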
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_trace/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_trace/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_trace/_limits.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_trace/_limits.py
new file mode 100644
index 0000000..2d773d0
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_trace/_limits.py
@@ -0,0 +1,6 @@
+"""
+Limits for trace data.
+"""
+
+MAX_SPAN_META_KEY_LEN = 200
+MAX_SPAN_META_VALUE_LEN = 25000
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_version.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_version.py
new file mode 100644
index 0000000..9c520e5
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/_version.py
@@ -0,0 +1,16 @@
+# file generated by setuptools_scm
+# don't change, don't track in version control
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from typing import Tuple, Union
+ VERSION_TUPLE = Tuple[Union[int, str], ...]
+else:
+ VERSION_TUPLE = object
+
+version: str
+__version__: str
+__version_tuple__: VERSION_TUPLE
+version_tuple: VERSION_TUPLE
+
+__version__ = version = '2.6.5'
+__version_tuple__ = version_tuple = (2, 6, 5)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_api_security/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_api_security/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_api_security/api_manager.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_api_security/api_manager.py
new file mode 100644
index 0000000..60e3b14
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_api_security/api_manager.py
@@ -0,0 +1,163 @@
+import base64
+import gzip
+import json
+import sys
+from typing import TYPE_CHECKING # noqa:F401
+
+from ddtrace._trace._limits import MAX_SPAN_META_VALUE_LEN
+from ddtrace.appsec import _processor as appsec_processor
+from ddtrace.appsec._asm_request_context import add_context_callback
+from ddtrace.appsec._asm_request_context import call_waf_callback
+from ddtrace.appsec._asm_request_context import remove_context_callback
+from ddtrace.appsec._constants import API_SECURITY
+from ddtrace.appsec._constants import SPAN_DATA_NAMES
+from ddtrace.internal.logger import get_logger
+from ddtrace.internal.metrics import Metrics
+from ddtrace.internal.service import Service
+from ddtrace.settings.asm import config as asm_config
+
+
+if TYPE_CHECKING:
+ from typing import Optional # noqa:F401
+
+
+log = get_logger(__name__)
+metrics = Metrics(namespace="datadog.api_security")
+_sentinel = object()
+
+
+class TooLargeSchemaException(Exception):
+ pass
+
+
+class APIManager(Service):
+ COLLECTED = [
+ ("REQUEST_HEADERS_NO_COOKIES", API_SECURITY.REQUEST_HEADERS_NO_COOKIES, dict),
+ ("REQUEST_COOKIES", API_SECURITY.REQUEST_COOKIES, dict),
+ ("REQUEST_QUERY", API_SECURITY.REQUEST_QUERY, dict),
+ ("REQUEST_PATH_PARAMS", API_SECURITY.REQUEST_PATH_PARAMS, dict),
+ ("REQUEST_BODY", API_SECURITY.REQUEST_BODY, None),
+ ("RESPONSE_HEADERS_NO_COOKIES", API_SECURITY.RESPONSE_HEADERS_NO_COOKIES, dict),
+ ("RESPONSE_BODY", API_SECURITY.RESPONSE_BODY, None),
+ ]
+
+ _instance = None # type: Optional[APIManager]
+
+ SAMPLE_START_VALUE = 1.0 - sys.float_info.epsilon
+
+ @classmethod
+ def enable(cls):
+ # type: () -> None
+ if cls._instance is not None:
+ log.debug("%s already enabled", cls.__name__)
+ return
+
+ log.debug("Enabling %s", cls.__name__)
+ metrics.enable()
+ cls._instance = cls()
+ cls._instance.start()
+ log.debug("%s enabled", cls.__name__)
+
+ @classmethod
+ def disable(cls):
+ # type: () -> None
+ if cls._instance is None:
+ log.debug("%s not enabled", cls.__name__)
+ return
+
+ log.debug("Disabling %s", cls.__name__)
+ cls._instance.stop()
+ cls._instance = None
+ metrics.disable()
+ log.debug("%s disabled", cls.__name__)
+
+ def __init__(self):
+ # type: () -> None
+ super(APIManager, self).__init__()
+
+ self.current_sampling_value = self.SAMPLE_START_VALUE
+ self._schema_meter = metrics.get_meter("schema")
+ log.debug("%s initialized", self.__class__.__name__)
+
+ def _stop_service(self):
+ # type: () -> None
+ remove_context_callback(self._schema_callback, global_callback=True)
+
+ def _start_service(self):
+ # type: () -> None
+ add_context_callback(self._schema_callback, global_callback=True)
+
+ def _should_collect_schema(self, env):
+ method = env.waf_addresses.get(SPAN_DATA_NAMES.REQUEST_METHOD)
+ route = env.waf_addresses.get(SPAN_DATA_NAMES.REQUEST_ROUTE)
+ sample_rate = asm_config._api_security_sample_rate
+ # Framework is not fully supported
+ if not method or not route:
+ log.debug("unsupported groupkey for api security [method %s] [route %s]", bool(method), bool(route))
+ return False
+ # Rate limit per route
+ self.current_sampling_value += sample_rate
+ if self.current_sampling_value >= 1.0:
+ self.current_sampling_value -= 1.0
+ return True
+ return False
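+
+    # Worked example (editor's note): with sample_rate = 0.25 the accumulator
+    # starts just below 1.0, so the first request collects a schema and then
+    # roughly one request in four does, as the value re-crosses 1.0 after
+    # four increments of 0.25.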
+
+ def _schema_callback(self, env):
+ from ddtrace.appsec._utils import _appsec_apisec_features_is_active
+
+ if env.span is None or not _appsec_apisec_features_is_active():
+ return
+ root = env.span._local_root or env.span
+ if not root or any(meta_name in root._meta for _, meta_name, _ in self.COLLECTED):
+ return
+
+ try:
+ if not self._should_collect_schema(env):
+ return
+ except Exception:
+ log.warning("Failed to sample request for schema generation", exc_info=True)
+
+ # we need the request content type on the span
+ try:
+ headers = env.waf_addresses.get(SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES, _sentinel)
+ if headers is not _sentinel:
+ appsec_processor._set_headers(root, headers, kind="request")
+ except Exception:
+ log.debug("Failed to enrich request span with headers", exc_info=True)
+
+ waf_payload = {}
+ for address, _, transform in self.COLLECTED:
+ if not asm_config._api_security_parse_response_body and address == "RESPONSE_BODY":
+ continue
+ value = env.waf_addresses.get(SPAN_DATA_NAMES[address], _sentinel)
+ if value is _sentinel:
+ log.debug("no value for %s", address)
+ continue
+ if transform is not None:
+ value = transform(value)
+ waf_payload[address] = value
+ if waf_payload:
+ waf_payload["PROCESSOR_SETTINGS"] = {"extract-schema": True}
+ result = call_waf_callback(waf_payload)
+ if result is None:
+ return
+ for meta, schema in result.items():
+ b64_gzip_content = b""
+ try:
+ b64_gzip_content = base64.b64encode(
+ gzip.compress(json.dumps(schema, separators=",:").encode())
+ ).decode()
+ if len(b64_gzip_content) >= MAX_SPAN_META_VALUE_LEN:
+ raise TooLargeSchemaException
+ root._meta[meta] = b64_gzip_content
+ except Exception as e:
+ self._schema_meter.increment("errors", tags={"exc": e.__class__.__name__, "address": address})
+                # no rate limiter is defined on this class, so log directly
+                log.warning(
+ "Failed to get schema from %r [schema length=%d]:\n%s",
+ address,
+ len(b64_gzip_content),
+ repr(value)[:256],
+ exc_info=True,
+ )
+ self._schema_meter.increment("spans")
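+
+
+# Encoding sketch (editor's illustration, not upstream code): the schema tag
+# value built in _schema_callback is gzip-compressed, base64-encoded JSON.
+# >>> import base64, gzip, json
+# >>> payload = base64.b64encode(gzip.compress(
+# ...     json.dumps([{"q": [8]}], separators=",:").encode())).decode()
+# >>> len(payload) < MAX_SPAN_META_VALUE_LEN  # else TooLargeSchemaException
+# True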
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_asm_request_context.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_asm_request_context.py
new file mode 100644
index 0000000..1e1f28e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_asm_request_context.py
@@ -0,0 +1,549 @@
+import contextlib
+import functools
+import json
+from typing import Any
+from typing import Callable
+from typing import Dict
+from typing import Generator
+from typing import List
+from typing import Optional
+from typing import Set
+from typing import Tuple
+from urllib import parse
+
+from ddtrace.appsec import _handlers
+from ddtrace.appsec._constants import APPSEC
+from ddtrace.appsec._constants import SPAN_DATA_NAMES
+from ddtrace.appsec._constants import WAF_CONTEXT_NAMES
+from ddtrace.appsec._iast._utils import _is_iast_enabled
+from ddtrace.internal import core
+from ddtrace.internal.constants import REQUEST_PATH_PARAMS
+from ddtrace.internal.logger import get_logger
+from ddtrace.settings.asm import config as asm_config
+from ddtrace.span import Span
+
+
+log = get_logger(__name__)
+
+# Stopgap module for providing ASM context for the blocking features wrapping some contextvars.
+
+_WAF_ADDRESSES = "waf_addresses"
+_CALLBACKS = "callbacks"
+_TELEMETRY = "telemetry"
+_CONTEXT_CALL = "context"
+_WAF_CALL = "waf_run"
+_BLOCK_CALL = "block"
+_WAF_RESULTS = "waf_results"
+
+
+GLOBAL_CALLBACKS: Dict[str, List[Callable]] = {}
+
+
+class ASM_Environment:
+ """
+    An object of this class contains all ASM data (WAF and telemetry)
+    for a single request. It is bound to a single ASM request context
+    and is stored in a ContextVar.
+ """
+
+ def __init__(self, active: bool = False):
+ self.active: bool = active
+ self.span: Optional[Span] = None
+ self.span_asm_context: Optional[contextlib.AbstractContextManager] = None
+ self.waf_addresses: Dict[str, Any] = {}
+ self.callbacks: Dict[str, Any] = {}
+ self.telemetry: Dict[str, Any] = {}
+ self.addresses_sent: Set[str] = set()
+ self.must_call_globals: bool = True
+ self.waf_triggers: List[Dict[str, Any]] = []
+
+
+def _get_asm_context() -> ASM_Environment:
+ env = core.get_item("asm_env")
+ if env is None:
+ env = ASM_Environment()
+ core.set_item("asm_env", env)
+ return env
+
+
+def free_context_available() -> bool:
+ env = _get_asm_context()
+ return env.active and env.span is None
+
+
+def in_context() -> bool:
+ env = _get_asm_context()
+ return env.active
+
+
+def is_blocked() -> bool:
+ try:
+ env = _get_asm_context()
+ if not env.active or env.span is None:
+ return False
+ return bool(core.get_item(WAF_CONTEXT_NAMES.BLOCKED, span=env.span))
+ except BaseException:
+ return False
+
+
+def register(span: Span, span_asm_context=None) -> None:
+ env = _get_asm_context()
+ if not env.active:
+ log.debug("registering a span with no active asm context")
+ return
+ env.span = span
+ env.span_asm_context = span_asm_context
+
+
+def unregister(span: Span) -> None:
+ env = _get_asm_context()
+ if env.span_asm_context is not None and env.span is span:
+ env.span_asm_context.__exit__(None, None, None)
+ elif env.span is span and env.must_call_globals:
+ # needed for api security flushing information before end of the span
+ for function in GLOBAL_CALLBACKS.get(_CONTEXT_CALL, []):
+ function(env)
+ env.must_call_globals = False
+
+
+def flush_waf_triggers(env: ASM_Environment) -> None:
+ if env.waf_triggers and env.span:
+ root_span = env.span._local_root or env.span
+ old_tags = root_span.get_tag(APPSEC.JSON)
+ if old_tags is not None:
+ try:
+ new_json = json.loads(old_tags)
+ if "triggers" not in new_json:
+ new_json["triggers"] = []
+ new_json["triggers"].extend(env.waf_triggers)
+ except BaseException:
+ new_json = {"triggers": env.waf_triggers}
+ else:
+ new_json = {"triggers": env.waf_triggers}
+ root_span.set_tag_str(APPSEC.JSON, json.dumps(new_json, separators=(",", ":")))
+
+ env.waf_triggers = []
+
+
+GLOBAL_CALLBACKS[_CONTEXT_CALL] = [flush_waf_triggers]
+
+
+class _DataHandler:
+ """
+    An object of this class is created for each ASM request context.
+    It handles the creation and destruction of the ASM_Environment object
+    and allows the ASM context to be reentrant.
+ """
+
+ main_id = 0
+
+ def __init__(self):
+ _DataHandler.main_id += 1
+ env = ASM_Environment(True)
+
+ self._id = _DataHandler.main_id
+ self.active = True
+ self.execution_context = core.ExecutionContext(__name__, **{"asm_env": env})
+
+ env.telemetry[_WAF_RESULTS] = [], [], []
+ env.callbacks[_CONTEXT_CALL] = []
+
+ def finalise(self):
+ if self.active:
+ env = self.execution_context.get_item("asm_env")
+ callbacks = GLOBAL_CALLBACKS.get(_CONTEXT_CALL, []) if env.must_call_globals else []
+ env.must_call_globals = False
+ if env is not None and env.callbacks is not None and env.callbacks.get(_CONTEXT_CALL):
+ callbacks += env.callbacks.get(_CONTEXT_CALL)
+ if callbacks:
+ if env is not None:
+ for function in callbacks:
+ function(env)
+ self.execution_context.end()
+ self.active = False
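+
+# Lifecycle sketch (editor's note): one _DataHandler is created per request
+# by _start_context(); finalise() runs the per-context and global callbacks
+# at most once, then closes the underlying core.ExecutionContext.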
+
+
+def set_value(category: str, address: str, value: Any) -> None:
+ env = _get_asm_context()
+ if not env.active:
+ log.debug("setting %s address %s with no active asm context", category, address)
+ return
+ asm_context_attr = getattr(env, category, None)
+ if asm_context_attr is not None:
+ asm_context_attr[address] = value
+
+
+def set_headers_response(headers: Any) -> None:
+ if headers is not None:
+ set_waf_address(SPAN_DATA_NAMES.RESPONSE_HEADERS_NO_COOKIES, headers, _get_asm_context().span)
+
+
+def set_body_response(body_response):
+ # local import to avoid circular import
+ from ddtrace.appsec._utils import parse_response_body
+
+ parsed_body = parse_response_body(body_response)
+
+    if parsed_body is not None:
+ set_waf_address(SPAN_DATA_NAMES.RESPONSE_BODY, parsed_body)
+
+
+def set_waf_address(address: str, value: Any, span: Optional[Span] = None) -> None:
+ if address == SPAN_DATA_NAMES.REQUEST_URI_RAW:
+ parse_address = parse.urlparse(value)
+ no_scheme = parse.ParseResult("", "", *parse_address[2:])
+ waf_value = parse.urlunparse(no_scheme)
+ set_value(_WAF_ADDRESSES, address, waf_value)
+ else:
+ set_value(_WAF_ADDRESSES, address, value)
+ if span is None:
+ span = _get_asm_context().span
+ if span:
+ core.set_item(address, value, span=span)
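+
+# Example (editor's note): REQUEST_URI_RAW values are stripped of scheme and
+# netloc before being handed to the WAF:
+# >>> pr = parse.urlparse("https://example.com/a/b?q=1")
+# >>> parse.urlunparse(parse.ParseResult("", "", *pr[2:]))
+# '/a/b?q=1'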
+
+
+def get_value(category: str, address: str, default: Any = None) -> Any:
+ env = _get_asm_context()
+ if not env.active:
+ log.debug("getting %s address %s with no active asm context", category, address)
+ return default
+ asm_context_attr = getattr(env, category, None)
+ if asm_context_attr is not None:
+ return asm_context_attr.get(address, default)
+ return default
+
+
+def get_waf_address(address: str, default: Any = None) -> Any:
+ return get_value(_WAF_ADDRESSES, address, default=default)
+
+
+def get_waf_addresses(default: Any = None) -> Any:
+ env = _get_asm_context()
+ if not env.active:
+ log.debug("getting WAF addresses with no active asm context")
+ return default
+ return env.waf_addresses
+
+
+def add_context_callback(function, global_callback: bool = False) -> None:
+ if global_callback:
+ callbacks = GLOBAL_CALLBACKS.setdefault(_CONTEXT_CALL, [])
+ else:
+ callbacks = get_value(_CALLBACKS, _CONTEXT_CALL)
+ if callbacks is not None:
+ callbacks.append(function)
+
+
+def remove_context_callback(function, global_callback: bool = False) -> None:
+ if global_callback:
+ callbacks = GLOBAL_CALLBACKS.get(_CONTEXT_CALL)
+ else:
+ callbacks = get_value(_CALLBACKS, _CONTEXT_CALL)
+ if callbacks:
+        callbacks[:] = [cb for cb in callbacks if cb != function]
+
+
+def set_waf_callback(value) -> None:
+ set_value(_CALLBACKS, _WAF_CALL, value)
+
+
+def call_waf_callback(custom_data: Optional[Dict[str, Any]] = None) -> Optional[Dict[str, str]]:
+ if not asm_config._asm_enabled:
+ return None
+ callback = get_value(_CALLBACKS, _WAF_CALL)
+ if callback:
+ return callback(custom_data)
+ else:
+ log.warning("WAF callback called but not set")
+ return None
+
+
+def set_ip(ip: Optional[str]) -> None:
+ if ip is not None:
+ set_waf_address(SPAN_DATA_NAMES.REQUEST_HTTP_IP, ip, _get_asm_context().span)
+
+
+def get_ip() -> Optional[str]:
+ return get_value(_WAF_ADDRESSES, SPAN_DATA_NAMES.REQUEST_HTTP_IP)
+
+
+# Note: get/set headers use Any since we just carry the headers here without changing or using them,
+# and different frameworks use different types that we don't want to force into a Mapping at the
+# early point where set_headers is usually called
+
+
+def set_headers(headers: Any) -> None:
+ if headers is not None:
+ set_waf_address(SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES, headers, _get_asm_context().span)
+
+
+def get_headers() -> Optional[Any]:
+ return get_value(_WAF_ADDRESSES, SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES, {})
+
+
+def set_headers_case_sensitive(case_sensitive: bool) -> None:
+ set_waf_address(SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES_CASE, case_sensitive, _get_asm_context().span)
+
+
+def get_headers_case_sensitive() -> bool:
+    return get_value(_WAF_ADDRESSES, SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES_CASE, False)  # type: ignore
+
+
+def set_block_request_callable(_callable: Optional[Callable], *_) -> None:
+ """
+    Sets a callable that can be used to make a best-effort attempt to block the
+    request. If the callable needs any params, like headers, they should be
+    curried with functools.partial.
+ """
+ if _callable:
+ set_value(_CALLBACKS, _BLOCK_CALL, _callable)
+
+
+def block_request() -> None:
+ """
+ Calls or returns the stored block request callable, if set.
+ """
+ _callable = get_value(_CALLBACKS, _BLOCK_CALL)
+ if _callable:
+ _callable()
+ else:
+ log.debug("Block request called but block callable not set by framework")
+
+
+def get_data_sent() -> Set[str]:
+ env = _get_asm_context()
+ if not env.active:
+ log.debug("getting addresses sent with no active asm context")
+ return set()
+ return env.addresses_sent
+
+
+def asm_request_context_set(
+ remote_ip: Optional[str] = None,
+ headers: Any = None,
+ headers_case_sensitive: bool = False,
+ block_request_callable: Optional[Callable] = None,
+) -> None:
+ set_ip(remote_ip)
+ set_headers(headers)
+ set_headers_case_sensitive(headers_case_sensitive)
+ set_block_request_callable(block_request_callable)
+
+
+def set_waf_results(result_data, result_info, is_blocked) -> None:
+ three_lists = get_waf_results()
+ if three_lists is not None:
+ list_results_data, list_result_info, list_is_blocked = three_lists
+ list_results_data.append(result_data)
+ list_result_info.append(result_info)
+ list_is_blocked.append(is_blocked)
+
+
+def get_waf_results() -> Optional[Tuple[List[Any], List[Any], List[bool]]]:
+ return get_value(_TELEMETRY, _WAF_RESULTS)
+
+
+def reset_waf_results() -> None:
+ set_value(_TELEMETRY, _WAF_RESULTS, ([], [], []))
+
+
+def store_waf_results_data(data) -> None:
+ if not data:
+ return
+ env = _get_asm_context()
+ if not env.active:
+ log.debug("storing waf results data with no active asm context")
+ return
+ if not env.span:
+ log.debug("storing waf results data with no active span")
+ return
+ for d in data:
+ d["span_id"] = env.span.span_id
+ env.waf_triggers.extend(data)
+
+
+@contextlib.contextmanager
+def asm_request_context_manager(
+ remote_ip: Optional[str] = None,
+ headers: Any = None,
+ headers_case_sensitive: bool = False,
+ block_request_callable: Optional[Callable] = None,
+) -> Generator[Optional[_DataHandler], None, None]:
+ """
+ The ASM context manager
+ """
+ resources = _start_context(remote_ip, headers, headers_case_sensitive, block_request_callable)
+ if resources is not None:
+ try:
+ yield resources
+ finally:
+ _end_context(resources)
+ else:
+ yield None
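+
+# Usage sketch (editor's illustration): framework integrations wrap request
+# handling in this manager so WAF addresses and callbacks stay request-scoped.
+# >>> with asm_request_context_manager("203.0.113.7", {"Host": "x"}) as res:
+# ...     pass  # res is None when neither ASM nor IAST is enabled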
+
+
+def _start_context(
+ remote_ip: Optional[str], headers: Any, headers_case_sensitive: bool, block_request_callable: Optional[Callable]
+) -> Optional[_DataHandler]:
+ if asm_config._asm_enabled or asm_config._iast_enabled:
+ resources = _DataHandler()
+ if asm_config._asm_enabled:
+ asm_request_context_set(remote_ip, headers, headers_case_sensitive, block_request_callable)
+ _handlers.listen()
+ listen_context_handlers()
+ return resources
+ return None
+
+
+def _on_context_started(ctx):
+ resources = _start_context(
+ ctx.get_item("remote_addr"),
+ ctx.get_item("headers"),
+ ctx.get_item("headers_case_sensitive"),
+ ctx.get_item("block_request_callable"),
+ )
+ ctx.set_item("resources", resources)
+
+
+def _end_context(resources):
+ resources.finalise()
+ core.set_item("asm_env", None)
+
+
+def _on_context_ended(ctx):
+ resources = ctx.get_item("resources")
+ if resources is not None:
+ _end_context(resources)
+
+
+core.on("context.started.wsgi.__call__", _on_context_started)
+core.on("context.ended.wsgi.__call__", _on_context_ended)
+core.on("context.started.django.traced_get_response", _on_context_started)
+core.on("context.ended.django.traced_get_response", _on_context_ended)
+core.on("django.traced_get_response.pre", set_block_request_callable)
+
+
+def _on_wrapped_view(kwargs):
+ return_value = [None, None]
+ # if Appsec is enabled, we can try to block as we have the path parameters at that point
+ if asm_config._asm_enabled and in_context():
+ log.debug("Flask WAF call for Suspicious Request Blocking on request")
+ if kwargs:
+ set_waf_address(REQUEST_PATH_PARAMS, kwargs)
+ call_waf_callback()
+ if is_blocked():
+ callback_block = get_value(_CALLBACKS, "flask_block")
+ return_value[0] = callback_block
+
+ # If IAST is enabled, taint the Flask function kwargs (path parameters)
+ if _is_iast_enabled() and kwargs:
+ from ddtrace.appsec._iast._taint_tracking import OriginType
+ from ddtrace.appsec._iast._taint_tracking import taint_pyobject
+ from ddtrace.appsec._iast.processor import AppSecIastSpanProcessor
+
+ if not AppSecIastSpanProcessor.is_span_analyzed():
+ return return_value
+
+ _kwargs = {}
+ for k, v in kwargs.items():
+ _kwargs[k] = taint_pyobject(
+ pyobject=v, source_name=k, source_value=v, source_origin=OriginType.PATH_PARAMETER
+ )
+ return_value[1] = _kwargs
+ return return_value
+
+
+def _on_set_request_tags(request, span, flask_config):
+ if _is_iast_enabled():
+ from ddtrace.appsec._iast._metrics import _set_metric_iast_instrumented_source
+ from ddtrace.appsec._iast._taint_tracking import OriginType
+ from ddtrace.appsec._iast._taint_utils import taint_structure
+ from ddtrace.appsec._iast.processor import AppSecIastSpanProcessor
+
+ _set_metric_iast_instrumented_source(OriginType.COOKIE_NAME)
+ _set_metric_iast_instrumented_source(OriginType.COOKIE)
+
+ if not AppSecIastSpanProcessor.is_span_analyzed(span._local_root or span):
+ return
+
+ request.cookies = taint_structure(
+ request.cookies,
+ OriginType.COOKIE_NAME,
+ OriginType.COOKIE,
+ override_pyobject_tainted=True,
+ )
+
+
+def _on_pre_tracedrequest(ctx):
+ _on_set_request_tags(ctx.get_item("flask_request"), ctx["current_span"], ctx.get_item("flask_config"))
+ block_request_callable = ctx.get_item("block_request_callable")
+ current_span = ctx["current_span"]
+ if asm_config._asm_enabled:
+ set_block_request_callable(functools.partial(block_request_callable, current_span))
+ if core.get_item(WAF_CONTEXT_NAMES.BLOCKED):
+ block_request()
+
+
+def _set_headers_and_response(response, headers, *_):
+ if not asm_config._asm_enabled:
+ return
+
+ from ddtrace.appsec._utils import _appsec_apisec_features_is_active
+
+ if _appsec_apisec_features_is_active():
+ if headers:
+ # start_response was not called yet, set the HTTP response headers earlier
+ if isinstance(headers, dict):
+ list_headers = list(headers.items())
+ else:
+ list_headers = list(headers)
+ set_headers_response(list_headers)
+ if response and asm_config._api_security_parse_response_body:
+ set_body_response(response)
+
+
+def _call_waf_first(integration, *_):
+ if not asm_config._asm_enabled:
+ return
+
+ log.debug("%s WAF call for Suspicious Request Blocking on request", integration)
+ return call_waf_callback()
+
+
+def _call_waf(integration, *_):
+ if not asm_config._asm_enabled:
+ return
+
+ log.debug("%s WAF call for Suspicious Request Blocking on response", integration)
+ return call_waf_callback()
+
+
+def _on_block_decided(callback):
+ if not asm_config._asm_enabled:
+ return
+
+ set_value(_CALLBACKS, "flask_block", callback)
+
+
+def _get_headers_if_appsec():
+ if asm_config._asm_enabled:
+ return get_headers()
+
+
+def listen_context_handlers():
+ core.on("flask.finalize_request.post", _set_headers_and_response)
+ core.on("flask.wrapped_view", _on_wrapped_view, "callback_and_args")
+ core.on("flask._patched_request", _on_pre_tracedrequest)
+ core.on("wsgi.block_decided", _on_block_decided)
+ core.on("flask.start_response", _call_waf_first, "waf")
+
+ core.on("django.start_response.post", _call_waf_first)
+ core.on("django.finalize_response", _call_waf)
+ core.on("django.after_request_headers", _get_headers_if_appsec, "headers")
+ core.on("django.extract_body", _get_headers_if_appsec, "headers")
+ core.on("django.after_request_headers.finalize", _set_headers_and_response)
+ core.on("flask.set_request_tags", _on_set_request_tags)
+
+ core.on("asgi.start_request", _call_waf_first)
+ core.on("asgi.start_response", _call_waf)
+ core.on("asgi.finalize_response", _set_headers_and_response)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_capabilities.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_capabilities.py
new file mode 100644
index 0000000..b5ac759
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_capabilities.py
@@ -0,0 +1,77 @@
+import base64
+import enum
+import os
+from typing import Optional
+
+import ddtrace
+from ddtrace.appsec._utils import _appsec_rc_features_is_enabled
+from ddtrace.settings.asm import config as asm_config
+
+
+def _appsec_rc_file_is_not_static():
+ return "DD_APPSEC_RULES" not in os.environ
+
+
+def _asm_feature_is_required():
+ flags = _rc_capabilities()
+ return Flags.ASM_ACTIVATION in flags or Flags.ASM_API_SECURITY_SAMPLE_RATE in flags
+
+
+class Flags(enum.IntFlag):
+ ASM_ACTIVATION = 1 << 1
+ ASM_IP_BLOCKING = 1 << 2
+ ASM_DD_RULES = 1 << 3
+ ASM_EXCLUSIONS = 1 << 4
+ ASM_REQUEST_BLOCKING = 1 << 5
+ ASM_ASM_RESPONSE_BLOCKING = 1 << 6
+ ASM_USER_BLOCKING = 1 << 7
+ ASM_CUSTOM_RULES = 1 << 8
+ ASM_CUSTOM_BLOCKING_RESPONSE = 1 << 9
+ ASM_TRUSTED_IPS = 1 << 10
+ ASM_API_SECURITY_SAMPLE_RATE = 1 << 11
+
+
+_ALL_ASM_BLOCKING = (
+ Flags.ASM_IP_BLOCKING
+ | Flags.ASM_DD_RULES
+ | Flags.ASM_EXCLUSIONS
+ | Flags.ASM_REQUEST_BLOCKING
+ | Flags.ASM_ASM_RESPONSE_BLOCKING
+ | Flags.ASM_USER_BLOCKING
+    | Flags.ASM_CUSTOM_RULES
+ | Flags.ASM_CUSTOM_BLOCKING_RESPONSE
+)
+
+
+def _rc_capabilities(test_tracer: Optional[ddtrace.Tracer] = None) -> Flags:
+ tracer = ddtrace.tracer if test_tracer is None else test_tracer
+ value = Flags(0)
+ if ddtrace.config._remote_config_enabled:
+ if _appsec_rc_features_is_enabled():
+ value |= Flags.ASM_ACTIVATION
+ if tracer._appsec_processor and _appsec_rc_file_is_not_static():
+ value |= _ALL_ASM_BLOCKING
+ if asm_config._api_security_enabled:
+ value |= Flags.ASM_API_SECURITY_SAMPLE_RATE
+ return value
+
+
+def _appsec_rc_capabilities(test_tracer: Optional[ddtrace.Tracer] = None) -> str:
+ r"""return the bit representation of the composed capabilities in base64
+ bit 0: Reserved
+ bit 1: ASM 1-click Activation
+ bit 2: ASM Ip blocking
+
+ Int Number -> binary number -> bytes representation -> base64 representation
+ ASM Activation:
+ 2 -> 10 -> b'\x02' -> "Ag=="
+ ASM Ip blocking:
+ 4 -> 100 -> b'\x04' -> "BA=="
+ ASM Activation and ASM Ip blocking:
+ 6 -> 110 -> b'\x06' -> "Bg=="
+ ...
+ 256 -> 100000000 -> b'\x01\x00' -> b'AQA='
+ """
+ value = _rc_capabilities(test_tracer=test_tracer)
+ return base64.b64encode(value.to_bytes((value.bit_length() + 7) // 8, "big")).decode()
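+
+
+# Worked example (editor's note), extending the docstring above: with
+# ASM_ACTIVATION (bit 1) and ASM_API_SECURITY_SAMPLE_RATE (bit 11) set,
+# value = 2 + 2048 = 2050 -> b'\x08\x02' -> base64 "CAI=".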
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_constants.py
new file mode 100644
index 0000000..fc0f4a4
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_constants.py
@@ -0,0 +1,216 @@
+import os
+from typing import Any
+from typing import Iterator
+
+from ddtrace.internal.constants import HTTP_REQUEST_BLOCKED
+from ddtrace.internal.constants import REQUEST_PATH_PARAMS
+from ddtrace.internal.constants import RESPONSE_HEADERS
+from ddtrace.internal.constants import STATUS_403_TYPE_AUTO
+
+
+class Constant_Class(type):
+ """
+ metaclass for Constant Classes
+ - You can access constants with APPSEC.ENV or APPSEC["ENV"]
+    - Direct assignment will fail: APPSEC.ENV = "something" raises TypeError, like other immutable types
+ - Constant Classes can be iterated:
+ for constant_name, constant_value in APPSEC: ...
+ """
+
+ def __setattr__(self, __name: str, __value: Any) -> None:
+ raise TypeError("Constant class does not support item assignment: %s.%s" % (self.__name__, __name))
+
+ def __iter__(self) -> Iterator[str]:
+ def aux():
+ for t in self.__dict__.items():
+ if not t[0].startswith("_"):
+ yield t
+
+ return aux()
+
+ def get(self, k: str, default: Any = None) -> Any:
+ return self.__dict__.get(k, default)
+
+ def __contains__(self, k: str) -> bool:
+ return k in self.__dict__
+
+ def __getitem__(self, k: str) -> Any:
+ return self.__dict__[k]
+
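+# Usage sketch (editor's note): each class below acts as a frozen, iterable
+# namespace via Constant_Class.
+# >>> APPSEC.ENV       # "DD_APPSEC_ENABLED"
+# >>> APPSEC["ENV"]    # same value via item access
+# >>> "ENV" in APPSEC  # True
+# >>> dict(APPSEC)     # all public constant name/value pairs
+# >>> # APPSEC.ENV = "x" would raise TypeError (immutable by design)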
+
+class APPSEC(metaclass=Constant_Class):
+ """Specific constants for AppSec"""
+
+ ENV = "DD_APPSEC_ENABLED"
+ ENABLED = "_dd.appsec.enabled"
+ JSON = "_dd.appsec.json"
+ EVENT_RULE_VERSION = "_dd.appsec.event_rules.version"
+ EVENT_RULE_ERRORS = "_dd.appsec.event_rules.errors"
+ EVENT_RULE_LOADED = "_dd.appsec.event_rules.loaded"
+ EVENT_RULE_ERROR_COUNT = "_dd.appsec.event_rules.error_count"
+ WAF_DURATION = "_dd.appsec.waf.duration"
+ WAF_DURATION_EXT = "_dd.appsec.waf.duration_ext"
+ WAF_TIMEOUTS = "_dd.appsec.waf.timeouts"
+ WAF_VERSION = "_dd.appsec.waf.version"
+ ORIGIN_VALUE = "appsec"
+ CUSTOM_EVENT_PREFIX = "appsec.events"
+ USER_LOGIN_EVENT_PREFIX = "_dd.appsec.events.users.login"
+ USER_LOGIN_EVENT_PREFIX_PUBLIC = "appsec.events.users.login"
+ USER_LOGIN_EVENT_SUCCESS_TRACK = "appsec.events.users.login.success.track"
+ USER_LOGIN_EVENT_FAILURE_TRACK = "appsec.events.users.login.failure.track"
+ USER_SIGNUP_EVENT = "appsec.events.users.signup.track"
+ AUTO_LOGIN_EVENTS_SUCCESS_MODE = "_dd.appsec.events.users.login.success.auto.mode"
+ AUTO_LOGIN_EVENTS_FAILURE_MODE = "_dd.appsec.events.users.login.failure.auto.mode"
+ BLOCKED = "appsec.blocked"
+ EVENT = "appsec.event"
+ AUTOMATIC_USER_EVENTS_TRACKING = "DD_APPSEC_AUTOMATED_USER_EVENTS_TRACKING"
+ USER_MODEL_LOGIN_FIELD = "DD_USER_MODEL_LOGIN_FIELD"
+ USER_MODEL_EMAIL_FIELD = "DD_USER_MODEL_EMAIL_FIELD"
+ USER_MODEL_NAME_FIELD = "DD_USER_MODEL_NAME_FIELD"
+
+
+class IAST(metaclass=Constant_Class):
+ """Specific constants for IAST"""
+
+ ENV = "DD_IAST_ENABLED"
+ ENV_DEBUG = "_DD_IAST_DEBUG"
+ TELEMETRY_REPORT_LVL = "DD_IAST_TELEMETRY_VERBOSITY"
+ LAZY_TAINT = "_DD_IAST_LAZY_TAINT"
+ JSON = "_dd.iast.json"
+ ENABLED = "_dd.iast.enabled"
+ CONTEXT_KEY = "_iast_data"
+ PATCH_MODULES = "_DD_IAST_PATCH_MODULES"
+ DENY_MODULES = "_DD_IAST_DENY_MODULES"
+ SEP_MODULES = ","
+ REQUEST_IAST_ENABLED = "_dd.iast.request_enabled"
+
+
+class IAST_SPAN_TAGS(metaclass=Constant_Class):
+ """Specific constants for IAST span tags"""
+
+ TELEMETRY_REQUEST_TAINTED = "_dd.iast.telemetry.request.tainted"
+ TELEMETRY_EXECUTED_SINK = "_dd.iast.telemetry.executed.sink"
+
+
+class WAF_DATA_NAMES(metaclass=Constant_Class):
+ """string names used by the waf library for requesting data from requests"""
+
+ REQUEST_BODY = "server.request.body"
+ REQUEST_QUERY = "server.request.query"
+ REQUEST_HEADERS_NO_COOKIES = "server.request.headers.no_cookies"
+ REQUEST_URI_RAW = "server.request.uri.raw"
+ REQUEST_METHOD = "server.request.method"
+ REQUEST_PATH_PARAMS = "server.request.path_params"
+ REQUEST_COOKIES = "server.request.cookies"
+ REQUEST_HTTP_IP = "http.client_ip"
+ REQUEST_USER_ID = "usr.id"
+ RESPONSE_STATUS = "server.response.status"
+ RESPONSE_HEADERS_NO_COOKIES = "server.response.headers.no_cookies"
+ RESPONSE_BODY = "server.response.body"
+ PROCESSOR_SETTINGS = "waf.context.processor"
+
+
+class SPAN_DATA_NAMES(metaclass=Constant_Class):
+ """string names used by the library for tagging data from requests in context or span"""
+
+ REQUEST_BODY = "http.request.body"
+ REQUEST_QUERY = "http.request.query"
+ REQUEST_HEADERS_NO_COOKIES = "http.request.headers"
+ REQUEST_HEADERS_NO_COOKIES_CASE = "http.request.headers_case_sensitive"
+ REQUEST_URI_RAW = "http.request.uri"
+ REQUEST_ROUTE = "http.request.route"
+ REQUEST_METHOD = "http.request.method"
+ REQUEST_PATH_PARAMS = REQUEST_PATH_PARAMS
+ REQUEST_COOKIES = "http.request.cookies"
+ REQUEST_HTTP_IP = "http.request.remote_ip"
+ REQUEST_USER_ID = "usr.id"
+ RESPONSE_STATUS = "http.response.status"
+ RESPONSE_HEADERS_NO_COOKIES = RESPONSE_HEADERS
+ RESPONSE_BODY = "http.response.body"
+
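+# Editor's note: WAF_DATA_NAMES and SPAN_DATA_NAMES are deliberately parallel;
+# callers look values up by the shared attribute name, so data stored under
+# SPAN_DATA_NAMES.REQUEST_QUERY ("http.request.query") is forwarded to the
+# WAF as WAF_DATA_NAMES.REQUEST_QUERY ("server.request.query").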
+
+class API_SECURITY(metaclass=Constant_Class):
+ """constants related to API Security"""
+
+ ENABLED = "_dd.appsec.api_security.enabled"
+ ENV_VAR_ENABLED = "DD_API_SECURITY_ENABLED"
+ PARSE_RESPONSE_BODY = "DD_API_SECURITY_PARSE_RESPONSE_BODY"
+ REQUEST_HEADERS_NO_COOKIES = "_dd.appsec.s.req.headers"
+ REQUEST_COOKIES = "_dd.appsec.s.req.cookies"
+ REQUEST_QUERY = "_dd.appsec.s.req.query"
+ REQUEST_PATH_PARAMS = "_dd.appsec.s.req.params"
+ REQUEST_BODY = "_dd.appsec.s.req.body"
+ RESPONSE_HEADERS_NO_COOKIES = "_dd.appsec.s.res.headers"
+ RESPONSE_BODY = "_dd.appsec.s.res.body"
+ SAMPLE_RATE = "DD_API_SECURITY_REQUEST_SAMPLE_RATE"
+ MAX_PAYLOAD_SIZE = 0x1000000 # 16MB maximum size
+
+
+class WAF_CONTEXT_NAMES(metaclass=Constant_Class):
+ """string names used by the library for tagging data from requests in context"""
+
+ RESULTS = "http.request.waf.results"
+ BLOCKED = HTTP_REQUEST_BLOCKED
+ CALLBACK = "http.request.waf.callback"
+
+
+class WAF_ACTIONS(metaclass=Constant_Class):
+ """string identifier for actions returned by the waf"""
+
+ BLOCK = "block"
+ PARAMETERS = "parameters"
+ TYPE = "type"
+ ID = "id"
+ DEFAULT_PARAMETERS = STATUS_403_TYPE_AUTO
+ BLOCK_ACTION = "block_request"
+ REDIRECT_ACTION = "redirect_request"
+ DEFAULT_ACTIONS = {
+ BLOCK: {
+ ID: BLOCK,
+ TYPE: BLOCK_ACTION,
+ PARAMETERS: DEFAULT_PARAMETERS,
+ }
+ }
+
+
+class PRODUCTS(metaclass=Constant_Class):
+ """string identifier for remote config products"""
+
+ ASM = "ASM"
+ ASM_DATA = "ASM_DATA"
+ ASM_DD = "ASM_DD"
+ ASM_FEATURES = "ASM_FEATURES"
+
+
+class LOGIN_EVENTS_MODE(metaclass=Constant_Class):
+ """
+ string identifier for the mode of the user login events. Can be:
+ DISABLED: automatic login events are disabled.
+ SAFE: automatic login events are enabled but will only store non-PII fields (id, pk uid...)
+ EXTENDED: automatic login events are enabled and will store potentially PII fields (username,
+ email, ...).
+ SDK: manually issued login events using the SDK.
+ """
+
+ DISABLED = "disabled"
+ SAFE = "safe"
+ EXTENDED = "extended"
+ SDK = "sdk"
+
+
+class DEFAULT(metaclass=Constant_Class):
+ ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
+ RULES = os.path.join(ROOT_DIR, "rules.json")
+ TRACE_RATE_LIMIT = 100
+ WAF_TIMEOUT = 5.0 # float (milliseconds)
+ APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP = (
+ rb"(?i)(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?)key)|token|consumer_?"
+ rb"(?:id|key|secret)|sign(?:ed|ature)|bearer|authorization"
+ )
+ APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP = (
+ rb"(?i)(?:p(?:ass)?w(?:or)?d|pass(?:_?phrase)?|secret|(?:api_?|private_?|public_?|access_?|secret_?)"
+ rb"key(?:_?id)?|token|consumer_?(?:id|key|secret)|sign(?:ed|ature)?|auth(?:entication|orization)?)"
+ rb'(?:\s*=[^;]|"\s*:\s*"[^"]+")|bearer\s+[a-z0-9\._\-]+|token:[a-z0-9]{13}|gh[opsu]_[0-9a-zA-Z]{36}'
+ rb"|ey[I-L][\w=-]+\.ey[I-L][\w=-]+(?:\.[\w.+\/=-]+)?|[\-]{5}BEGIN[a-z\s]+PRIVATE\sKEY[\-]{5}[^\-]+[\-]"
+ rb"{5}END[a-z\s]+PRIVATE\sKEY|ssh-rsa\s*[a-z0-9\/\.+]{100,}"
+ )
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/__init__.py
new file mode 100644
index 0000000..cee6eda
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/__init__.py
@@ -0,0 +1,214 @@
+import ctypes
+import time
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Optional
+
+from ddtrace.appsec._constants import DEFAULT
+from ddtrace.internal.logger import get_logger
+from ddtrace.settings.asm import config as asm_config
+
+
+LOGGER = get_logger(__name__)
+
+if asm_config._asm_libddwaf_available:
+ try:
+ from .ddwaf_types import DDWafRulesType
+ from .ddwaf_types import _observator
+ from .ddwaf_types import ddwaf_config
+ from .ddwaf_types import ddwaf_context_capsule
+ from .ddwaf_types import ddwaf_get_version
+ from .ddwaf_types import ddwaf_object
+ from .ddwaf_types import ddwaf_object_free
+ from .ddwaf_types import ddwaf_result
+ from .ddwaf_types import ddwaf_run
+ from .ddwaf_types import py_ddwaf_context_init
+ from .ddwaf_types import py_ddwaf_init
+ from .ddwaf_types import py_ddwaf_known_addresses
+ from .ddwaf_types import py_ddwaf_update
+
+ _DDWAF_LOADED = True
+ except BaseException:
+ _DDWAF_LOADED = False
+ LOGGER.warning("DDWaf features disabled. WARNING: Dynamic Library not loaded", exc_info=True)
+else:
+ _DDWAF_LOADED = False
+
+
+class DDWaf_result(object):
+ __slots__ = ["data", "actions", "runtime", "total_runtime", "timeout", "truncation", "derivatives"]
+
+ def __init__(
+ self,
+ data: Optional[str],
+ actions: List[str],
+ runtime: float,
+ total_runtime: float,
+ timeout: bool,
+ truncation: int,
+ derivatives: Dict[str, Any],
+ ):
+ self.data = data
+ self.actions = actions
+ self.runtime = runtime
+ self.total_runtime = total_runtime
+ self.timeout = timeout
+ self.truncation = truncation
+ self.derivatives = derivatives
+
+
+class DDWaf_info(object):
+ __slots__ = ["loaded", "failed", "errors", "version"]
+
+ def __init__(self, loaded: int, failed: int, errors: Dict[str, Any], version: str):
+ self.loaded = loaded
+ self.failed = failed
+ self.errors = errors
+ self.version = version
+
+ def __repr__(self):
+ return "{loaded: %d, failed: %d, errors: %s, version: %s}" % (
+ self.loaded,
+ self.failed,
+ str(self.errors),
+ self.version,
+ )
+
+
+if _DDWAF_LOADED:
+
+ class DDWaf(object):
+ def __init__(
+ self,
+ ruleset_map: Dict[str, Any],
+ obfuscation_parameter_key_regexp: bytes,
+ obfuscation_parameter_value_regexp: bytes,
+ ):
+ config = ddwaf_config(
+ key_regex=obfuscation_parameter_key_regexp, value_regex=obfuscation_parameter_value_regexp
+ )
+ diagnostics = ddwaf_object()
+ ruleset_map_object = ddwaf_object.create_without_limits(ruleset_map)
+ self._handle = py_ddwaf_init(ruleset_map_object, ctypes.byref(config), ctypes.byref(diagnostics))
+ self._set_info(diagnostics)
+ info = self.info
+ if not self._handle or info.failed:
+                # We keep the handle alive in case of errors, as some valid rules can be loaded
+                # at the same time as some invalid ones are rejected
+ LOGGER.debug(
+ "DDWAF.__init__: invalid rules\n ruleset: %s\nloaded:%s\nerrors:%s\n",
+ ruleset_map_object.struct,
+ info.failed,
+ info.errors,
+ )
+ ddwaf_object_free(ctypes.byref(ruleset_map_object))
+
+ @property
+ def required_data(self) -> List[str]:
+ return py_ddwaf_known_addresses(self._handle) if self._handle else []
+
+ def _set_info(self, diagnostics: ddwaf_object) -> None:
+ info_struct = diagnostics.struct
+ rules = info_struct.get("rules", {}) if info_struct else {} # type: ignore
+ errors_result = rules.get("errors", {})
+ version = info_struct.get("ruleset_version", "") if info_struct else "" # type: ignore
+ self._info = DDWaf_info(len(rules.get("loaded", [])), len(rules.get("failed", [])), errors_result, version)
+ ddwaf_object_free(diagnostics)
+
+ @property
+ def info(self) -> DDWaf_info:
+ return self._info
+
+ def update_rules(self, new_rules: Dict[str, DDWafRulesType]) -> bool:
+            """Update the rules of the WAF instance. Returns True on success, False otherwise."""
+ rules = ddwaf_object.create_without_limits(new_rules)
+ diagnostics = ddwaf_object()
+ result = py_ddwaf_update(self._handle, rules, diagnostics)
+ self._set_info(diagnostics)
+ ddwaf_object_free(rules)
+ if result:
+ LOGGER.debug("DDWAF.update_rules success.\ninfo %s", self.info)
+ self._handle = result
+ return True
+ else:
+ LOGGER.debug("DDWAF.update_rules: keeping the previous handle.")
+ return False
+
+ def _at_request_start(self) -> Optional[ddwaf_context_capsule]:
+ ctx = None
+ if self._handle:
+ ctx = py_ddwaf_context_init(self._handle)
+ if not ctx:
+ LOGGER.debug("DDWaf._at_request_start: failure to create the context.")
+ return ctx
+
+ def _at_request_end(self) -> None:
+ pass
+
+ def run(
+ self,
+ ctx: ddwaf_context_capsule,
+ data: DDWafRulesType,
+ timeout_ms: float = DEFAULT.WAF_TIMEOUT,
+ ) -> DDWaf_result:
+ start = time.time()
+ if not ctx:
+ LOGGER.debug("DDWaf.run: dry run. no context created.")
+ return DDWaf_result(None, [], 0, (time.time() - start) * 1e6, False, 0, {})
+
+ result = ddwaf_result()
+ observator = _observator()
+ wrapper = ddwaf_object(data, observator=observator)
+ error = ddwaf_run(ctx.ctx, wrapper, None, ctypes.byref(result), int(timeout_ms * 1000))
+ if error < 0:
+ LOGGER.debug("run DDWAF error: %d\ninput %s\nerror %s", error, wrapper.struct, self.info.errors)
+ return DDWaf_result(
+ result.events.struct,
+ result.actions.struct,
+ result.total_runtime / 1e3,
+ (time.time() - start) * 1e6,
+ result.timeout,
+ observator.truncation,
+ result.derivatives.struct,
+ )
+
+ def version() -> str:
+ return ddwaf_get_version().decode("UTF-8")
+
+else:
+ # Mockup of the DDWaf class doing nothing
+ class DDWaf(object): # type: ignore
+ required_data: List[str] = []
+ info: DDWaf_info = DDWaf_info(0, 0, {}, "")
+
+ def __init__(
+ self,
+ rules: Dict[str, Any],
+ obfuscation_parameter_key_regexp: bytes,
+ obfuscation_parameter_value_regexp: bytes,
+ ):
+ self._handle = None
+
+ def run(
+ self,
+ ctx: Any,
+ data: Any,
+ timeout_ms: float = DEFAULT.WAF_TIMEOUT,
+ ) -> DDWaf_result:
+ LOGGER.debug("DDWaf features disabled. dry run")
+ return DDWaf_result(None, [], 0.0, 0.0, False, 0, {})
+
+ def update_rules(self, _: Dict[str, Any]) -> bool:
+ LOGGER.debug("DDWaf features disabled. dry update")
+ return False
+
+ def _at_request_start(self) -> None:
+ return None
+
+ def _at_request_end(self) -> None:
+ pass
+
+ def version() -> str:
+ LOGGER.debug("DDWaf features disabled. null version")
+ return "0.0.0"
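+
+
+# Editor's usage sketch (hypothetical; actually running it needs libddwaf, so
+# only the mock path is exercised otherwise). Both DDWaf variants expose the
+# same surface, so callers can be written against a single shape:
+#
+#     waf = DDWaf(
+#         {"version": "2.2", "rules": []},
+#         DEFAULT.APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP,
+#         DEFAULT.APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP,
+#     )
+#     ctx = waf._at_request_start()
+#     res = waf.run(ctx, {"server.request.query": {"q": "1"}})
+#     print(res.timeout, res.runtime, res.data)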
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/ddwaf_types.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/ddwaf_types.py
new file mode 100644
index 0000000..aecfd79
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/ddwaf_types.py
@@ -0,0 +1,555 @@
+import ctypes
+import ctypes.util
+from enum import IntEnum
+from platform import machine
+from platform import system
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Union
+
+from ddtrace.internal.logger import get_logger
+from ddtrace.settings.asm import config as asm_config
+
+
+DDWafRulesType = Union[None, int, str, List[Any], Dict[str, Any]]
+
+log = get_logger(__name__)
+
+#
+# Dynamic loading of libddwaf. For now it requires the file, or a link to it, to be in the current directory
+#
+
+if system() == "Linux":
+ try:
+ ctypes.CDLL(ctypes.util.find_library("rt"), mode=ctypes.RTLD_GLOBAL)
+ except BaseException: # nosec
+ pass
+
+ARCHI = machine().lower()
+
+# 32-bit-Python on 64-bit-Windows
+
+ddwaf = ctypes.CDLL(asm_config._asm_libddwaf)
+#
+# Constants
+#
+
+DDWAF_MAX_STRING_LENGTH = 4096
+DDWAF_MAX_CONTAINER_DEPTH = 20
+DDWAF_MAX_CONTAINER_SIZE = 256
+DDWAF_NO_LIMIT = 1 << 31
+DDWAF_DEPTH_NO_LIMIT = 1000
+_TRUNC_STRING_LENGTH = 1
+_TRUNC_CONTAINER_DEPTH = 4
+_TRUNC_CONTAINER_SIZE = 2
+
+
+class DDWAF_OBJ_TYPE(IntEnum):
+ DDWAF_OBJ_INVALID = 0
+    # Value shall be decoded as an int64_t (or int32_t on 32-bit platforms).
+    DDWAF_OBJ_SIGNED = 1 << 0
+    # Value shall be decoded as a uint64_t (or uint32_t on 32-bit platforms).
+    DDWAF_OBJ_UNSIGNED = 1 << 1
+ # Value shall be decoded as a UTF-8 string of length nbEntries.
+ DDWAF_OBJ_STRING = 1 << 2
+ # Value shall be decoded as an array of ddwaf_object of length nbEntries, each item having no parameterName.
+ DDWAF_OBJ_ARRAY = 1 << 3
+ # Value shall be decoded as an array of ddwaf_object of length nbEntries, each item having a parameterName.
+ DDWAF_OBJ_MAP = 1 << 4
+    # Value shall be decoded as a bool.
+    DDWAF_OBJ_BOOL = 1 << 5
+ # 64-bit float (or double) type
+ DDWAF_OBJ_FLOAT = 1 << 6
+    # Null type, only used for its semantic value.
+ DDWAF_OBJ_NULL = 1 << 7
+
+
+class DDWAF_RET_CODE(IntEnum):
+ DDWAF_ERR_INTERNAL = -3
+ DDWAF_ERR_INVALID_OBJECT = -2
+ DDWAF_ERR_INVALID_ARGUMENT = -1
+ DDWAF_OK = 0
+ DDWAF_MATCH = 1
+
+
+class DDWAF_LOG_LEVEL(IntEnum):
+ DDWAF_LOG_TRACE = 0
+ DDWAF_LOG_DEBUG = 1
+ DDWAF_LOG_INFO = 2
+ DDWAF_LOG_WARN = 3
+ DDWAF_LOG_ERROR = 4
+ DDWAF_LOG_OFF = 5
+
+
+#
+# Objects Definitions
+#
+
+# obj_struct = DDWafRulesType
+
+
+class _observator:
+ def __init__(self):
+ self.truncation = 0
+
+
+# to allow cyclic references, ddwaf_object fields are defined later
+class ddwaf_object(ctypes.Structure):
+    # "type" defines how to read the "value" union field
+    # (defined in ddwaf.h):
+    # 1 is intValue
+    # 2 is uintValue
+    # 4 is stringValue, UTF-8 encoded
+    # 8 is an array of length "nbEntries" without parameterName
+    # 16 is a map: an array of length "nbEntries" with parameterName
+    # 32 is boolean
+
+ def __init__(
+ self,
+ struct: DDWafRulesType = None,
+        observator: _observator = _observator(),  # noqa: B008
+ max_objects: int = DDWAF_MAX_CONTAINER_SIZE,
+ max_depth: int = DDWAF_MAX_CONTAINER_DEPTH,
+ max_string_length: int = DDWAF_MAX_STRING_LENGTH,
+ ) -> None:
+ def truncate_string(string: bytes) -> bytes:
+ if len(string) > max_string_length - 1:
+ observator.truncation |= _TRUNC_STRING_LENGTH
+ # difference of 1 to take null char at the end on the C side into account
+ return string[: max_string_length - 1]
+ return string
+
+ if isinstance(struct, bool):
+ ddwaf_object_bool(self, struct)
+ elif isinstance(struct, int):
+ ddwaf_object_signed(self, struct)
+ elif isinstance(struct, str):
+ ddwaf_object_string(self, truncate_string(struct.encode("UTF-8", errors="ignore")))
+ elif isinstance(struct, bytes):
+ ddwaf_object_string(self, truncate_string(struct))
+ elif isinstance(struct, float):
+ ddwaf_object_float(self, struct)
+ elif isinstance(struct, list):
+ if max_depth <= 0:
+ observator.truncation |= _TRUNC_CONTAINER_DEPTH
+ max_objects = 0
+ array = ddwaf_object_array(self)
+ for counter_object, elt in enumerate(struct):
+ if counter_object >= max_objects:
+ observator.truncation |= _TRUNC_CONTAINER_SIZE
+ break
+ obj = ddwaf_object(
+ elt,
+ observator=observator,
+ max_objects=max_objects,
+ max_depth=max_depth - 1,
+ max_string_length=max_string_length,
+ )
+ ddwaf_object_array_add(array, obj)
+ elif isinstance(struct, dict):
+ if max_depth <= 0:
+ observator.truncation |= _TRUNC_CONTAINER_DEPTH
+ max_objects = 0
+ map_o = ddwaf_object_map(self)
+ # order is unspecified and could lead to problems if max_objects is reached
+ for counter_object, (key, val) in enumerate(struct.items()):
+ if not isinstance(key, (bytes, str)): # discards non string keys
+ continue
+ if counter_object >= max_objects:
+ observator.truncation |= _TRUNC_CONTAINER_SIZE
+ break
+ res_key = truncate_string(key.encode("UTF-8", errors="ignore") if isinstance(key, str) else key)
+ obj = ddwaf_object(
+ val,
+ observator=observator,
+ max_objects=max_objects,
+ max_depth=max_depth - 1,
+ max_string_length=max_string_length,
+ )
+ ddwaf_object_map_add(map_o, res_key, obj)
+ elif struct is not None:
+ ddwaf_object_string(self, truncate_string(str(struct).encode("UTF-8", errors="ignore")))
+ else:
+ ddwaf_object_null(self)
+
+ @classmethod
+ def create_without_limits(cls, struct: DDWafRulesType) -> "ddwaf_object":
+ return cls(struct, max_objects=DDWAF_NO_LIMIT, max_depth=DDWAF_DEPTH_NO_LIMIT, max_string_length=DDWAF_NO_LIMIT)
+
+ @property
+ def struct(self) -> DDWafRulesType:
+ """Generate a python structure from ddwaf_object"""
+ if self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_STRING:
+ return self.value.stringValue.decode("UTF-8", errors="ignore")
+ if self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_MAP:
+ return {
+ self.value.array[i].parameterName.decode("UTF-8", errors="ignore"): self.value.array[i].struct
+ for i in range(self.nbEntries)
+ }
+ if self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_ARRAY:
+ return [self.value.array[i].struct for i in range(self.nbEntries)]
+ if self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_SIGNED:
+ return self.value.intValue
+ if self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_UNSIGNED:
+ return self.value.uintValue
+ if self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_BOOL:
+ return self.value.boolean
+ if self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_FLOAT:
+ return self.value.f64
+ if self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_NULL or self.type == DDWAF_OBJ_TYPE.DDWAF_OBJ_INVALID:
+ return None
+ log.debug("ddwaf_object struct: unknown object type: %s", repr(type(self.type)))
+ return None
+
+ def __repr__(self):
+ return repr(self.struct)
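+
+    # Editor's note: round-tripping is the intended invariant here, i.e.
+    # ddwaf_object(x).struct should reproduce x for supported types, with the
+    # _observator recording any truncation as bit flags. A hypothetical check
+    # (requires libddwaf to be loadable, since the ddwaf_object_* constructors
+    # are ctypes bindings):
+    #
+    #     obs = _observator()
+    #     obj = ddwaf_object({"key": "x" * 5000}, observator=obs)
+    #     assert obs.truncation & _TRUNC_STRING_LENGTH
+    #     assert isinstance(obj.struct, dict)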
+
+
+ddwaf_object_p = ctypes.POINTER(ddwaf_object)
+
+
+class ddwaf_value(ctypes.Union):
+ _fields_ = [
+ ("stringValue", ctypes.c_char_p),
+ ("uintValue", ctypes.c_ulonglong),
+ ("intValue", ctypes.c_longlong),
+ ("array", ddwaf_object_p),
+ ("boolean", ctypes.c_bool),
+ ("f64", ctypes.c_double),
+ ]
+
+
+ddwaf_object._fields_ = [
+ ("parameterName", ctypes.c_char_p),
+ ("parameterNameLength", ctypes.c_uint64),
+ ("value", ddwaf_value),
+ ("nbEntries", ctypes.c_uint64),
+ ("type", ctypes.c_int),
+]
+
+
+class ddwaf_result(ctypes.Structure):
+ _fields_ = [
+ ("timeout", ctypes.c_bool),
+ ("events", ddwaf_object),
+ ("actions", ddwaf_object),
+ ("derivatives", ddwaf_object),
+ ("total_runtime", ctypes.c_uint64),
+ ]
+
+ def __repr__(self):
+ return "total_runtime=%r, events=%r, timeout=%r, action=[%r]" % (
+ self.total_runtime,
+ self.events.struct,
+            self.timeout,  # timeout is a plain c_bool field, not a ddwaf_object, so no .struct
+ self.actions,
+ )
+
+ def __del__(self):
+ try:
+ ddwaf_result_free(self)
+ except TypeError:
+ pass
+
+
+ddwaf_result_p = ctypes.POINTER(ddwaf_result)
+
+
+class ddwaf_config_limits(ctypes.Structure):
+ _fields_ = [
+ ("max_container_size", ctypes.c_uint32),
+ ("max_container_depth", ctypes.c_uint32),
+ ("max_string_length", ctypes.c_uint32),
+ ]
+
+
+class ddwaf_config_obfuscator(ctypes.Structure):
+ _fields_ = [
+ ("key_regex", ctypes.c_char_p),
+ ("value_regex", ctypes.c_char_p),
+ ]
+
+
+ddwaf_object_free_fn = ctypes.CFUNCTYPE(None, ddwaf_object_p)
+ddwaf_object_free = ddwaf_object_free_fn(
+ ("ddwaf_object_free", ddwaf),
+ ((1, "object"),),
+)
+
+
+class ddwaf_config(ctypes.Structure):
+ _fields_ = [
+ ("limits", ddwaf_config_limits),
+ ("obfuscator", ddwaf_config_obfuscator),
+ ("free_fn", ddwaf_object_free_fn),
+ ]
+ # TODO : initial value of free_fn
+
+ def __init__(
+ self,
+ max_container_size: int = 0,
+ max_container_depth: int = 0,
+ max_string_length: int = 0,
+ key_regex: bytes = b"",
+ value_regex: bytes = b"",
+ free_fn=ddwaf_object_free,
+ ) -> None:
+ self.limits.max_container_size = max_container_size
+ self.limits.max_container_depth = max_container_depth
+ self.limits.max_string_length = max_string_length
+ self.obfuscator.key_regex = key_regex
+ self.obfuscator.value_regex = value_regex
+ self.free_fn = free_fn
+
+
+ddwaf_config_p = ctypes.POINTER(ddwaf_config)
+
+
+ddwaf_handle = ctypes.c_void_p # may stay as this because it's mainly an abstract type in the interface
+ddwaf_context = ctypes.c_void_p # may stay as this because it's mainly an abstract type in the interface
+
+
+class ddwaf_handle_capsule:
+ def __init__(self, handle: ddwaf_handle) -> None:
+ self.handle = handle
+ self.free_fn = ddwaf_destroy
+
+ def __del__(self):
+ if self.handle:
+ try:
+ self.free_fn(self.handle)
+ except TypeError:
+ pass
+ self.handle = None
+
+ def __bool__(self):
+ return bool(self.handle)
+
+
+class ddwaf_context_capsule:
+ def __init__(self, ctx: ddwaf_context) -> None:
+ self.ctx = ctx
+ self.free_fn = ddwaf_context_destroy
+
+ def __del__(self):
+ if self.ctx:
+ try:
+ self.free_fn(self.ctx)
+ except TypeError:
+ pass
+ self.ctx = None
+
+ def __bool__(self):
+ return bool(self.ctx)
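+
+
+# Editor's note: both capsule classes implement the same RAII-style guard. The
+# native handle is released in __del__ (wrapped in try/except TypeError to
+# survive interpreter shutdown, when the free function may already be torn
+# down), and truthiness reflects whether the underlying pointer is still set.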
+
+
+ddwaf_log_cb = ctypes.POINTER(
+ ctypes.CFUNCTYPE(
+ None, ctypes.c_int, ctypes.c_char_p, ctypes.c_char_p, ctypes.c_uint, ctypes.c_char_p, ctypes.c_uint64
+ )
+)
+
+
+#
+# Function prototypes (creating Python counterpart functions from the C functions via ctypes)
+#
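+# Editor's note (an assumption based on the ctypes paramflags convention used
+# below): in each (flag, "name") pair, flag 1 marks a plain input parameter,
+# while flag 3 (input|output) tells ctypes the argument is also returned, which
+# is why the ddwaf_object_* constructors hand back the object they fill in.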
+
+ddwaf_init = ctypes.CFUNCTYPE(ddwaf_handle, ddwaf_object_p, ddwaf_config_p, ddwaf_object_p)(
+ ("ddwaf_init", ddwaf),
+ (
+ (1, "ruleset_map"),
+ (1, "config", None),
+ (1, "diagnostics", None),
+ ),
+)
+
+
+def py_ddwaf_init(ruleset_map: ddwaf_object, config, info) -> ddwaf_handle_capsule:
+ return ddwaf_handle_capsule(ddwaf_init(ruleset_map, config, info))
+
+
+ddwaf_update = ctypes.CFUNCTYPE(ddwaf_handle, ddwaf_handle, ddwaf_object_p, ddwaf_object_p)(
+ ("ddwaf_update", ddwaf),
+ (
+ (1, "handle"),
+ (1, "ruleset_map"),
+ (1, "diagnostics", None),
+ ),
+)
+
+
+def py_ddwaf_update(handle: ddwaf_handle_capsule, ruleset_map: ddwaf_object, info) -> ddwaf_handle_capsule:
+ return ddwaf_handle_capsule(ddwaf_update(handle.handle, ruleset_map, ctypes.byref(info)))
+
+
+ddwaf_destroy = ctypes.CFUNCTYPE(None, ddwaf_handle)(
+ ("ddwaf_destroy", ddwaf),
+ ((1, "handle"),),
+)
+
+ddwaf_known_addresses = ctypes.CFUNCTYPE(
+ ctypes.POINTER(ctypes.c_char_p), ddwaf_handle, ctypes.POINTER(ctypes.c_uint32)
+)(
+ ("ddwaf_known_addresses", ddwaf),
+ (
+ (1, "handle"),
+ (1, "size"),
+ ),
+)
+
+
+def py_ddwaf_known_addresses(handle: ddwaf_handle_capsule) -> List[str]:
+ size = ctypes.c_uint32()
+ obj = ddwaf_known_addresses(handle.handle, ctypes.byref(size))
+ return [obj[i].decode("UTF-8") for i in range(size.value)]
+
+
+ddwaf_context_init = ctypes.CFUNCTYPE(ddwaf_context, ddwaf_handle)(
+ ("ddwaf_context_init", ddwaf),
+ ((1, "handle"),),
+)
+
+
+def py_ddwaf_context_init(handle: ddwaf_handle_capsule) -> ddwaf_context_capsule:
+ return ddwaf_context_capsule(ddwaf_context_init(handle.handle))
+
+
+ddwaf_run = ctypes.CFUNCTYPE(
+ ctypes.c_int, ddwaf_context, ddwaf_object_p, ddwaf_object_p, ddwaf_result_p, ctypes.c_uint64
+)(("ddwaf_run", ddwaf), ((1, "context"), (1, "persistent_data"), (1, "ephemeral_data"), (1, "result"), (1, "timeout")))
+
+ddwaf_context_destroy = ctypes.CFUNCTYPE(None, ddwaf_context)(
+ ("ddwaf_context_destroy", ddwaf),
+ ((1, "context"),),
+)
+
+ddwaf_result_free = ctypes.CFUNCTYPE(None, ddwaf_result_p)(
+ ("ddwaf_result_free", ddwaf),
+ ((1, "result"),),
+)
+
+ddwaf_object_invalid = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p)(
+ ("ddwaf_object_invalid", ddwaf),
+ ((3, "object"),),
+)
+
+ddwaf_object_string = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p, ctypes.c_char_p)(
+ ("ddwaf_object_string", ddwaf),
+ (
+ (3, "object"),
+ (1, "string"),
+ ),
+)
+
+# object_string variants not used
+
+ddwaf_object_string_from_unsigned = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p, ctypes.c_uint64)(
+ ("ddwaf_object_string_from_unsigned", ddwaf),
+ (
+ (3, "object"),
+ (1, "value"),
+ ),
+)
+
+ddwaf_object_string_from_signed = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p, ctypes.c_int64)(
+ ("ddwaf_object_string_from_signed", ddwaf),
+ (
+ (3, "object"),
+ (1, "value"),
+ ),
+)
+
+ddwaf_object_unsigned = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p, ctypes.c_uint64)(
+ ("ddwaf_object_unsigned", ddwaf),
+ (
+ (3, "object"),
+ (1, "value"),
+ ),
+)
+
+ddwaf_object_signed = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p, ctypes.c_int64)(
+ ("ddwaf_object_signed", ddwaf),
+ (
+ (3, "object"),
+ (1, "value"),
+ ),
+)
+
+# object_(un)signed_forced: not used
+
+ddwaf_object_bool = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p, ctypes.c_bool)(
+ ("ddwaf_object_bool", ddwaf),
+ (
+ (3, "object"),
+ (1, "value"),
+ ),
+)
+
+
+ddwaf_object_float = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p, ctypes.c_double)(
+ ("ddwaf_object_float", ddwaf),
+ (
+ (3, "object"),
+ (1, "value"),
+ ),
+)
+
+ddwaf_object_null = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p)(
+ ("ddwaf_object_null", ddwaf),
+ ((3, "object"),),
+)
+
+ddwaf_object_array = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p)(
+ ("ddwaf_object_array", ddwaf),
+ ((3, "object"),),
+)
+
+ddwaf_object_map = ctypes.CFUNCTYPE(ddwaf_object_p, ddwaf_object_p)(
+ ("ddwaf_object_map", ddwaf),
+ ((3, "object"),),
+)
+
+ddwaf_object_array_add = ctypes.CFUNCTYPE(ctypes.c_bool, ddwaf_object_p, ddwaf_object_p)(
+ ("ddwaf_object_array_add", ddwaf),
+ (
+ (1, "array"),
+ (1, "object"),
+ ),
+)
+
+ddwaf_object_map_add = ctypes.CFUNCTYPE(ctypes.c_bool, ddwaf_object_p, ctypes.c_char_p, ddwaf_object_p)(
+ ("ddwaf_object_map_add", ddwaf),
+ (
+ (1, "map"),
+ (1, "key"),
+ (1, "object"),
+ ),
+)
+
+# unused here because these are accessible from the Python side:
+# ddwaf_object_type
+# ddwaf_object_size
+# ddwaf_object_length
+# ddwaf_object_get_key
+# ddwaf_object_get_string
+# ddwaf_object_get_unsigned
+# ddwaf_object_get_signed
+# ddwaf_object_get_index
+# ddwaf_object_get_bool https://github.com/DataDog/libddwaf/commit/7dc68dacd972ae2e2a3c03a69116909c98dbd9cb
+# ddwaf_object_get_float
+
+
+ddwaf_get_version = ctypes.CFUNCTYPE(ctypes.c_char_p)(
+ ("ddwaf_get_version", ddwaf),
+ (),
+)
+
+
+ddwaf_set_log_cb = ctypes.CFUNCTYPE(ctypes.c_bool, ddwaf_log_cb, ctypes.c_int)(
+ ("ddwaf_set_log_cb", ddwaf),
+ (
+ (1, "cb"),
+ (1, "min_level"),
+ ),
+)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/libddwaf/x86_64/lib/libddwaf.so b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/libddwaf/x86_64/lib/libddwaf.so
new file mode 100644
index 0000000..ac482e1
Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_ddwaf/libddwaf/x86_64/lib/libddwaf.so differ
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_deduplications.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_deduplications.py
new file mode 100644
index 0000000..b3d9c07
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_deduplications.py
@@ -0,0 +1,32 @@
+import os
+import time
+from typing import Dict
+
+from ddtrace.internal.utils.formats import asbool
+
+
+class deduplication:
+ _time_lapse = 3600
+
+ def __init__(self, func):
+ self.func = func
+ self._last_timestamp: float = time.time()
+ self.reported_logs: Dict[int, float] = dict()
+
+ def get_last_time_reported(self, raw_log_hash: int) -> float:
+ return self.reported_logs.get(raw_log_hash, 0.0)
+
+ def is_deduplication_enabled(self) -> bool:
+ return asbool(os.environ.get("_DD_APPSEC_DEDUPLICATION_ENABLED", "true"))
+
+ def __call__(self, *args, **kwargs):
+ result = None
+ if self.is_deduplication_enabled() is False:
+ result = self.func(*args, **kwargs)
+ else:
+ raw_log_hash = hash("".join([str(arg) for arg in args]))
+ last_reported_timestamp = self.get_last_time_reported(raw_log_hash)
+ if time.time() > last_reported_timestamp:
+ result = self.func(*args, **kwargs)
+ self.reported_logs[raw_log_hash] = time.time() + self._time_lapse
+ return result
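+
+
+# Editor's usage sketch (hypothetical): wrapping a reporting helper so that
+# identical argument tuples are only processed once per _time_lapse window.
+if __name__ == "__main__":
+
+    @deduplication
+    def _report(msg):
+        print(msg)
+        return True
+
+    print(_report("boom"))  # prints "boom", returns True
+    print(_report("boom"))  # suppressed within the window: returns None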
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_handlers.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_handlers.py
new file mode 100644
index 0000000..32efb83
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_handlers.py
@@ -0,0 +1,377 @@
+import functools
+import io
+import json
+
+import xmltodict
+
+from ddtrace.appsec._constants import SPAN_DATA_NAMES
+from ddtrace.appsec._iast._patch import if_iast_taint_returned_object_for
+from ddtrace.appsec._iast._patch import if_iast_taint_yield_tuple_for
+from ddtrace.appsec._iast._utils import _is_iast_enabled
+from ddtrace.contrib import trace_utils
+from ddtrace.ext import SpanTypes
+from ddtrace.internal import core
+from ddtrace.internal.constants import HTTP_REQUEST_BLOCKED
+from ddtrace.internal.logger import get_logger
+from ddtrace.internal.utils.http import parse_form_multipart
+from ddtrace.settings.asm import config as asm_config
+from ddtrace.vendor.wrapt import when_imported
+from ddtrace.vendor.wrapt import wrap_function_wrapper as _w
+
+
+log = get_logger(__name__)
+_BODY_METHODS = {"POST", "PUT", "DELETE", "PATCH"}
+
+
+def _get_content_length(environ):
+ content_length = environ.get("CONTENT_LENGTH")
+ transfer_encoding = environ.get("HTTP_TRANSFER_ENCODING")
+
+ if transfer_encoding == "chunked" or content_length is None:
+ return None
+
+ try:
+ return max(0, int(content_length))
+ except ValueError:
+ return 0
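+
+
+# Editor's note: expected behaviour of the helper above, for illustration:
+#   _get_content_length({"CONTENT_LENGTH": "128"}) == 128
+#   _get_content_length({"HTTP_TRANSFER_ENCODING": "chunked"}) is None
+#   _get_content_length({"CONTENT_LENGTH": "not-a-number"}) == 0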
+
+
+# set_http_meta
+
+
+def _on_set_http_meta(
+ span,
+ request_ip,
+ raw_uri,
+ route,
+ method,
+ request_headers,
+ request_cookies,
+ parsed_query,
+ request_path_params,
+ request_body,
+ status_code,
+ response_headers,
+ response_cookies,
+):
+ if _is_iast_enabled():
+ from ddtrace.appsec._iast.taint_sinks.insecure_cookie import asm_check_cookies
+
+ if response_cookies:
+ asm_check_cookies(response_cookies)
+
+ if asm_config._asm_enabled and span.span_type == SpanTypes.WEB:
+ # avoid circular import
+ from ddtrace.appsec._asm_request_context import set_waf_address
+
+ status_code = str(status_code) if status_code is not None else None
+
+ addresses = [
+ (SPAN_DATA_NAMES.REQUEST_HTTP_IP, request_ip),
+ (SPAN_DATA_NAMES.REQUEST_URI_RAW, raw_uri),
+ (SPAN_DATA_NAMES.REQUEST_ROUTE, route),
+ (SPAN_DATA_NAMES.REQUEST_METHOD, method),
+ (SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES, request_headers),
+ (SPAN_DATA_NAMES.REQUEST_COOKIES, request_cookies),
+ (SPAN_DATA_NAMES.REQUEST_QUERY, parsed_query),
+ (SPAN_DATA_NAMES.REQUEST_PATH_PARAMS, request_path_params),
+ (SPAN_DATA_NAMES.REQUEST_BODY, request_body),
+ (SPAN_DATA_NAMES.RESPONSE_STATUS, status_code),
+ (SPAN_DATA_NAMES.RESPONSE_HEADERS_NO_COOKIES, response_headers),
+ ]
+ for k, v in addresses:
+ if v is not None:
+ set_waf_address(k, v, span)
+
+
+core.on("set_http_meta_for_asm", _on_set_http_meta)
+
+
+# ASGI
+
+
+async def _on_asgi_request_parse_body(receive, headers):
+ if asm_config._asm_enabled:
+ data_received = await receive()
+ body = data_received.get("body", b"")
+
+ async def receive():
+ return data_received
+
+ content_type = headers.get("content-type") or headers.get("Content-Type")
+ try:
+ if content_type in ("application/json", "text/json"):
+ if body is None or body == b"":
+ req_body = None
+ else:
+ req_body = json.loads(body.decode())
+ elif content_type in ("application/xml", "text/xml"):
+ req_body = xmltodict.parse(body)
+ elif content_type == "text/plain":
+ req_body = None
+ else:
+ req_body = parse_form_multipart(body.decode(), headers) or None
+ return receive, req_body
+ except BaseException:
+ return receive, None
+
+ return receive, None
+
+
+# FLASK
+
+
+def _on_request_span_modifier(
+ ctx, flask_config, request, environ, _HAS_JSON_MIXIN, flask_version, flask_version_str, exception_type
+):
+ req_body = None
+ if asm_config._asm_enabled and request.method in _BODY_METHODS:
+ content_type = request.content_type
+ wsgi_input = environ.get("wsgi.input", "")
+
+ # Copy wsgi input if not seekable
+ if wsgi_input:
+ try:
+ seekable = wsgi_input.seekable()
+ except AttributeError:
+ seekable = False
+ if not seekable:
+ # https://gist.github.com/mitsuhiko/5721547
+ # Provide wsgi.input as an end-of-file terminated stream.
+ # In that case wsgi.input_terminated is set to True
+ # and an app is required to read to the end of the file and disregard CONTENT_LENGTH for reading.
+ if environ.get("wsgi.input_terminated"):
+ body = wsgi_input.read()
+ else:
+ content_length = _get_content_length(environ)
+ body = wsgi_input.read(content_length) if content_length else b""
+ environ["wsgi.input"] = io.BytesIO(body)
+
+ try:
+ if content_type in ("application/json", "text/json"):
+ if _HAS_JSON_MIXIN and hasattr(request, "json") and request.json:
+ req_body = request.json
+ elif request.data is None or request.data == b"":
+ req_body = None
+ else:
+ req_body = json.loads(request.data.decode("UTF-8"))
+ elif content_type in ("application/xml", "text/xml"):
+ req_body = xmltodict.parse(request.get_data())
+ elif hasattr(request, "form"):
+ req_body = request.form.to_dict()
+ else:
+ # no raw body
+ req_body = None
+ except (
+ exception_type,
+ AttributeError,
+ RuntimeError,
+ TypeError,
+ ValueError,
+ json.JSONDecodeError,
+ xmltodict.expat.ExpatError,
+ xmltodict.ParsingInterrupted,
+ ):
+ log.debug("Failed to parse request body", exc_info=True)
+ finally:
+ # Reset wsgi input to the beginning
+ if wsgi_input:
+ if seekable:
+ wsgi_input.seek(0)
+ else:
+ environ["wsgi.input"] = io.BytesIO(body)
+ return req_body
+
+
+def _on_request_init(wrapped, instance, args, kwargs):
+ wrapped(*args, **kwargs)
+ if _is_iast_enabled():
+ try:
+ from ddtrace.appsec._iast._metrics import _set_metric_iast_instrumented_source
+ from ddtrace.appsec._iast._taint_tracking import OriginType
+ from ddtrace.appsec._iast._taint_tracking import taint_pyobject
+ from ddtrace.appsec._iast.processor import AppSecIastSpanProcessor
+
+ _set_metric_iast_instrumented_source(OriginType.PATH)
+ _set_metric_iast_instrumented_source(OriginType.QUERY)
+
+ if not AppSecIastSpanProcessor.is_span_analyzed():
+ return
+
+ # TODO: instance.query_string = ??
+ instance.query_string = taint_pyobject(
+ pyobject=instance.query_string,
+ source_name=OriginType.QUERY,
+ source_value=instance.query_string,
+ source_origin=OriginType.QUERY,
+ )
+ instance.path = taint_pyobject(
+ pyobject=instance.path,
+ source_name=OriginType.PATH,
+ source_value=instance.path,
+ source_origin=OriginType.PATH,
+ )
+ except Exception:
+ log.debug("Unexpected exception while tainting pyobject", exc_info=True)
+
+
+def _on_flask_patch(flask_version):
+ if _is_iast_enabled():
+ try:
+ from ddtrace.appsec._iast._metrics import _set_metric_iast_instrumented_source
+ from ddtrace.appsec._iast._taint_tracking import OriginType
+
+ _w(
+ "werkzeug.datastructures",
+ "Headers.items",
+ functools.partial(if_iast_taint_yield_tuple_for, (OriginType.HEADER_NAME, OriginType.HEADER)),
+ )
+ _set_metric_iast_instrumented_source(OriginType.HEADER_NAME)
+ _set_metric_iast_instrumented_source(OriginType.HEADER)
+
+ _w(
+ "werkzeug.datastructures",
+ "ImmutableMultiDict.__getitem__",
+ functools.partial(if_iast_taint_returned_object_for, OriginType.PARAMETER),
+ )
+ _set_metric_iast_instrumented_source(OriginType.PARAMETER)
+
+ _w(
+ "werkzeug.datastructures",
+ "EnvironHeaders.__getitem__",
+ functools.partial(if_iast_taint_returned_object_for, OriginType.HEADER),
+ )
+ _set_metric_iast_instrumented_source(OriginType.HEADER)
+
+ _w("werkzeug.wrappers.request", "Request.__init__", _on_request_init)
+ _w(
+ "werkzeug.wrappers.request",
+ "Request.get_data",
+ functools.partial(if_iast_taint_returned_object_for, OriginType.BODY),
+ )
+ _set_metric_iast_instrumented_source(OriginType.BODY)
+
+ if flask_version < (2, 0, 0):
+ _w(
+ "werkzeug._internal",
+ "_DictAccessorProperty.__get__",
+ functools.partial(if_iast_taint_returned_object_for, OriginType.QUERY),
+ )
+ _set_metric_iast_instrumented_source(OriginType.QUERY)
+ except Exception:
+            log.debug("Unexpected exception while patching IAST functions", exc_info=True)
+
+
+def _on_flask_blocked_request(_):
+ core.set_item(HTTP_REQUEST_BLOCKED, True)
+
+
+def _on_django_func_wrapped(fn_args, fn_kwargs, first_arg_expected_type, *_):
+ # If IAST is enabled and we're wrapping a Django view call, taint the kwargs (view's
+ # path parameters)
+ if _is_iast_enabled() and fn_args and isinstance(fn_args[0], first_arg_expected_type):
+ from ddtrace.appsec._iast._taint_tracking import OriginType # noqa: F401
+ from ddtrace.appsec._iast._taint_tracking import is_pyobject_tainted
+ from ddtrace.appsec._iast._taint_tracking import taint_pyobject
+ from ddtrace.appsec._iast._taint_utils import taint_structure
+ from ddtrace.appsec._iast.processor import AppSecIastSpanProcessor
+
+ if not AppSecIastSpanProcessor.is_span_analyzed():
+ return
+
+ http_req = fn_args[0]
+
+ http_req.COOKIES = taint_structure(http_req.COOKIES, OriginType.COOKIE_NAME, OriginType.COOKIE)
+ http_req.GET = taint_structure(http_req.GET, OriginType.PARAMETER_NAME, OriginType.PARAMETER)
+ http_req.POST = taint_structure(http_req.POST, OriginType.BODY, OriginType.BODY)
+ if not is_pyobject_tainted(getattr(http_req, "_body", None)):
+ http_req._body = taint_pyobject(
+ http_req.body,
+ source_name="body",
+ source_value=http_req.body,
+ source_origin=OriginType.BODY,
+ )
+
+ http_req.headers = taint_structure(http_req.headers, OriginType.HEADER_NAME, OriginType.HEADER)
+ http_req.path = taint_pyobject(
+ http_req.path, source_name="path", source_value=http_req.path, source_origin=OriginType.PATH
+ )
+ http_req.path_info = taint_pyobject(
+ http_req.path_info,
+ source_name="path",
+ source_value=http_req.path,
+ source_origin=OriginType.PATH,
+ )
+ http_req.environ["PATH_INFO"] = taint_pyobject(
+ http_req.environ["PATH_INFO"],
+ source_name="path",
+ source_value=http_req.path,
+ source_origin=OriginType.PATH,
+ )
+ http_req.META = taint_structure(http_req.META, OriginType.HEADER_NAME, OriginType.HEADER)
+ if fn_kwargs:
+ try:
+ for k, v in fn_kwargs.items():
+ fn_kwargs[k] = taint_pyobject(
+ v, source_name=k, source_value=v, source_origin=OriginType.PATH_PARAMETER
+ )
+ except Exception:
+ log.debug("IAST: Unexpected exception while tainting path parameters", exc_info=True)
+
+
+def _on_wsgi_environ(wrapped, _instance, args, kwargs):
+ if _is_iast_enabled():
+ if not args:
+ return wrapped(*args, **kwargs)
+
+ from ddtrace.appsec._iast._metrics import _set_metric_iast_instrumented_source
+ from ddtrace.appsec._iast._taint_tracking import OriginType # noqa: F401
+ from ddtrace.appsec._iast._taint_utils import taint_structure
+ from ddtrace.appsec._iast.processor import AppSecIastSpanProcessor
+
+ _set_metric_iast_instrumented_source(OriginType.HEADER_NAME)
+ _set_metric_iast_instrumented_source(OriginType.HEADER)
+        # these sources are instrumented in _on_django_func_wrapped
+ _set_metric_iast_instrumented_source(OriginType.PATH_PARAMETER)
+ _set_metric_iast_instrumented_source(OriginType.PATH)
+ _set_metric_iast_instrumented_source(OriginType.COOKIE)
+ _set_metric_iast_instrumented_source(OriginType.COOKIE_NAME)
+ _set_metric_iast_instrumented_source(OriginType.PARAMETER)
+ _set_metric_iast_instrumented_source(OriginType.PARAMETER_NAME)
+ _set_metric_iast_instrumented_source(OriginType.BODY)
+
+ if not AppSecIastSpanProcessor.is_span_analyzed():
+ return wrapped(*args, **kwargs)
+
+ return wrapped(*((taint_structure(args[0], OriginType.HEADER_NAME, OriginType.HEADER),) + args[1:]), **kwargs)
+
+ return wrapped(*args, **kwargs)
+
+
+def _on_django_patch():
+ try:
+ from ddtrace.appsec._iast._taint_tracking import OriginType # noqa: F401
+
+ when_imported("django.http.request")(
+ lambda m: trace_utils.wrap(
+ m,
+ "QueryDict.__getitem__",
+ functools.partial(if_iast_taint_returned_object_for, OriginType.PARAMETER),
+ )
+ )
+ except Exception:
+        log.debug("Unexpected exception while patching IAST functions", exc_info=True)
+
+
+def listen():
+ core.on("flask.request_call_modifier", _on_request_span_modifier, "request_body")
+ core.on("flask.request_init", _on_request_init)
+ core.on("flask.blocked_request_callable", _on_flask_blocked_request)
+
+
+core.on("django.func.wrapped", _on_django_func_wrapped)
+core.on("django.wsgi_environ", _on_wsgi_environ, "wrapped_result")
+core.on("django.patch", _on_django_patch)
+core.on("flask.patch", _on_flask_patch)
+
+core.on("asgi.request.parse.body", _on_asgi_request_parse_body, "await_receive_and_body")
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/__init__.py
new file mode 100644
index 0000000..ddbdd58
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/__init__.py
@@ -0,0 +1,74 @@
+"""IAST (interactive application security testing) analyzes code for security vulnerabilities.
+
+To add new vulnerability analyzers (taint sinks), update `IAST_PATCH` in
+`ddtrace/appsec/iast/_patch_modules.py`.
+
+Create a new file with the same name: `ddtrace/appsec/iast/taint_sinks/[my_new_vulnerability].py`
+
+Then, implement the `patch()` function and its wrappers.
+
+To keep performance overhead low, the Overhead Control Engine (OCE) helps control the overhead of our
+wrapped functions. Create a class that inherits from `ddtrace.appsec._iast.taint_sinks._base.VulnerabilityBase`
+and register it with `ddtrace.appsec._iast.oce`.
+
+@oce.register
+class MyVulnerability(VulnerabilityBase):
+ vulnerability_type = "MyVulnerability"
+ evidence_type = "kind_of_Vulnerability"
+
+In each wrapper, decorate the function with the `wrap` method and
+report the vulnerabilities with the `report` method. OCE manages the number of requests and the number of
+vulnerabilities analyzed in order to reduce the overhead.
+
+@WeakHash.wrap
+def wrapped_function(wrapped, instance, args, kwargs):
+ # type: (Callable, str, Any, Any, Any) -> Any
+ WeakHash.report(
+ evidence_value=evidence,
+ )
+ return wrapped(*args, **kwargs)
+""" # noqa: RST201, RST213, RST210
+import inspect
+import sys
+
+from ddtrace.internal.logger import get_logger
+
+from ._overhead_control_engine import OverheadControl
+from ._utils import _is_iast_enabled
+
+
+log = get_logger(__name__)
+
+oce = OverheadControl()
+
+
+def ddtrace_iast_flask_patch():
+ """
+    Patch the code inside the Flask main app source code file (typically "app.py") so that
+    IAST/Custom Code propagation also works for the functions and methods defined inside it.
+    This must be called at the top level or inside the `if __name__ == "__main__":` block,
+    before the `app.run()` call. It also requires `DD_IAST_ENABLED` to be
+    enabled.
+ """
+ if not _is_iast_enabled():
+ return
+
+ from ._ast.ast_patching import astpatch_module
+
+ module_name = inspect.currentframe().f_back.f_globals["__name__"]
+ module = sys.modules[module_name]
+ try:
+ module_path, patched_ast = astpatch_module(module, remove_flask_run=True)
+ except Exception:
+ log.debug("Unexpected exception while AST patching", exc_info=True)
+ return
+
+ compiled_code = compile(patched_ast, module_path, "exec")
+ exec(compiled_code, module.__dict__) # nosec B102
+ sys.modules[module_name] = compiled_code
+
+
+__all__ = [
+ "oce",
+ "ddtrace_iast_flask_patch",
+]
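+
+
+# Editor's usage sketch for a hypothetical Flask "app.py", following the
+# docstring above:
+#
+#     from flask import Flask
+#     from ddtrace.appsec._iast import ddtrace_iast_flask_patch
+#
+#     app = Flask(__name__)
+#
+#     if __name__ == "__main__":
+#         ddtrace_iast_flask_patch()  # requires DD_IAST_ENABLED=true
+#         app.run()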
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/__init__.py
new file mode 100644
index 0000000..e5a0d9b
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/__init__.py
@@ -0,0 +1 @@
+#!/usr/bin/env python3
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/ast_patching.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/ast_patching.py
new file mode 100644
index 0000000..9eb516a
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/ast_patching.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env python3
+
+import ast
+import codecs
+import os
+import re
+from sys import builtin_module_names
+from types import ModuleType
+from typing import TYPE_CHECKING # noqa:F401
+from typing import Tuple
+
+
+if TYPE_CHECKING:
+ from typing import Optional # noqa:F401
+
+from ddtrace.appsec._constants import IAST
+from ddtrace.appsec._python_info.stdlib import _stdlib_for_python_version
+from ddtrace.internal.logger import get_logger
+from ddtrace.internal.module import origin
+
+from .visitor import AstVisitor
+
+
+# Prefixes for modules where IAST patching is allowed
+IAST_ALLOWLIST = ("tests.appsec.iast",) # type: tuple[str, ...]
+IAST_DENYLIST = ("ddtrace", "pkg_resources") # type: tuple[str, ...]
+
+
+if IAST.PATCH_MODULES in os.environ:
+ IAST_ALLOWLIST += tuple(os.environ[IAST.PATCH_MODULES].split(IAST.SEP_MODULES))
+
+if IAST.DENY_MODULES in os.environ:
+ IAST_DENYLIST += tuple(os.environ[IAST.DENY_MODULES].split(IAST.SEP_MODULES))
+
+
+ENCODING = ""
+
+log = get_logger(__name__)
+
+
+def get_encoding(module_path): # type: (str) -> str
+ """
+    First tries to detect the encoding of the file;
+    otherwise, returns the global default encoding
+ """
+ global ENCODING
+ if not ENCODING:
+ try:
+ ENCODING = codecs.lookup("utf-8-sig").name
+ except LookupError:
+ ENCODING = codecs.lookup("utf-8").name
+ return ENCODING
+
+
+try:
+ import importlib.metadata as il_md
+except ImportError:
+ import importlib_metadata as il_md # type: ignore[no-redef]
+
+
+def _build_installed_package_names_list(): # type: (...) -> set[str]
+ return {
+ ilmd_d.metadata["name"] for ilmd_d in il_md.distributions() if ilmd_d is not None and ilmd_d.files is not None
+ }
+
+
+_NOT_PATCH_MODULE_NAMES = (
+ _build_installed_package_names_list() | _stdlib_for_python_version() | set(builtin_module_names)
+)
+
+
+def _in_python_stdlib_or_third_party(module_name): # type: (str) -> bool
+ return module_name.split(".")[0].lower() in [x.lower() for x in _NOT_PATCH_MODULE_NAMES]
+
+
+def _should_iast_patch(module_name): # type: (str) -> bool
+ """
+    Select whether module_name should be patched, based on the longest prefix that matches
+    either the allow or the deny list. If a prefix is in both lists, deny wins.
+ """
+ max_allow = max((len(prefix) for prefix in IAST_ALLOWLIST if module_name.startswith(prefix)), default=-1)
+ max_deny = max((len(prefix) for prefix in IAST_DENYLIST if module_name.startswith(prefix)), default=-1)
+ diff = max_allow - max_deny
+ return diff > 0 or (diff == 0 and not _in_python_stdlib_or_third_party(module_name))
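+
+
+# Editor's note: worked examples of the longest-prefix rule, assuming the
+# default IAST_ALLOWLIST/IAST_DENYLIST above and no env overrides:
+#   _should_iast_patch("tests.appsec.iast.test_x") -> True  (allow prefix wins)
+#   _should_iast_patch("ddtrace.contrib.flask")    -> False (deny prefix wins)
+#   _should_iast_patch("json")                     -> False (diff == 0, stdlib)
+#   _should_iast_patch("myapp.views")              -> True  (diff == 0, first-party)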
+
+
+def visit_ast(
+ source_text, # type: str
+ module_path, # type: str
+ module_name="", # type: str
+):  # type: (...) -> Optional[ast.Module]
+ parsed_ast = ast.parse(source_text, module_path)
+
+ visitor = AstVisitor(
+ filename=module_path,
+ module_name=module_name,
+ )
+ modified_ast = visitor.visit(parsed_ast)
+
+ if not visitor.ast_modified:
+ return None
+
+ ast.fix_missing_locations(modified_ast)
+ return modified_ast
+
+
+_FLASK_INSTANCE_REGEXP = re.compile(r"(\S*)\s*=.*Flask\(.*")
+
+
+def _remove_flask_run(text):  # type: (str) -> str
+    """
+    Find and remove the Flask app.run() call. This is used when patching
+    the app.py file and exec'ing it, to replace the module without creating
+    a new instance.
+    """
+    flask_instance_name = re.search(_FLASK_INSTANCE_REGEXP, text)
+    if not flask_instance_name:
+        # No Flask instance assignment found: nothing to remove
+        return text
+
+    instance_name = flask_instance_name.groups()[-1]
+ new_text = re.sub(instance_name + r"\.run\(.*\)", "pass", text)
+ return new_text
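+
+
+# Editor's note: the intended effect, for illustration:
+#   _remove_flask_run('app = Flask(__name__)\napp.run(debug=True)\n')
+#   returns 'app = Flask(__name__)\npass\n'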
+
+
+def astpatch_module(module: ModuleType, remove_flask_run: bool = False) -> Tuple[str, str]:
+ module_name = module.__name__
+ module_path = str(origin(module))
+ try:
+ if os.stat(module_path).st_size == 0:
+ # Don't patch empty files like __init__.py
+ log.debug("empty file: %s", module_path)
+ return "", ""
+ except OSError:
+ log.debug("astpatch_source couldn't find the file: %s", module_path, exc_info=True)
+ return "", ""
+
+    # Get the file extension; if it's dll, os, pyd, dyn, or dynlib, return.
+    # If it's pyc or pyo, change to .py and check that the file exists; if not,
+    # return with a warning.
+ _, module_ext = os.path.splitext(module_path)
+
+ if module_ext.lower() not in {".pyo", ".pyc", ".pyw", ".py"}:
+ # Probably native or built-in module
+ log.debug("extension not supported: %s for: %s", module_ext, module_path)
+ return "", ""
+
+ with open(module_path, "r", encoding=get_encoding(module_path)) as source_file:
+ try:
+ source_text = source_file.read()
+ except UnicodeDecodeError:
+ log.debug("unicode decode error for file: %s", module_path, exc_info=True)
+ return "", ""
+
+ if len(source_text.strip()) == 0:
+ # Don't patch empty files like __init__.py
+ log.debug("empty file: %s", module_path)
+ return "", ""
+
+ if remove_flask_run:
+ source_text = _remove_flask_run(source_text)
+
+ new_source = visit_ast(
+ source_text,
+ module_path,
+ module_name=module_name,
+ )
+ if new_source is None:
+ log.debug("file not ast patched: %s", module_path)
+ return "", ""
+
+ return module_path, new_source
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/visitor.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/visitor.py
new file mode 100644
index 0000000..d7a4b08
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_ast/visitor.py
@@ -0,0 +1,759 @@
+#!/usr/bin/env python3
+from _ast import Expr
+from _ast import ImportFrom
+import ast
+import copy
+import sys
+from typing import Any # noqa:F401
+from typing import List # noqa:F401
+from typing import Set # noqa:F401
+
+from .._metrics import _set_metric_iast_instrumented_propagation
+from ..constants import DEFAULT_PATH_TRAVERSAL_FUNCTIONS
+from ..constants import DEFAULT_WEAK_RANDOMNESS_FUNCTIONS
+
+
+PY3 = sys.version_info[0] >= 3
+PY30_37 = sys.version_info >= (3, 0, 0) and sys.version_info < (3, 8, 0)
+PY38_PLUS = sys.version_info >= (3, 8, 0)
+PY39_PLUS = sys.version_info >= (3, 9, 0)
+
+CODE_TYPE_FIRST_PARTY = "first_party"
+CODE_TYPE_DD = "datadog"
+CODE_TYPE_SITE_PACKAGES = "site_packages"
+CODE_TYPE_STDLIB = "stdlib"
+TAINT_SINK_FUNCTION_REPLACEMENT = "ddtrace_taint_sinks.ast_function"
+
+
+def _mark_avoid_convert_recursively(node):
+ if node is not None:
+ node.avoid_convert = True
+ for child in ast.iter_child_nodes(node):
+ _mark_avoid_convert_recursively(child)
+
+
+class AstVisitor(ast.NodeTransformer):
+ def __init__(
+ self,
+ filename="",
+ module_name="",
+ ):
+ # Offset caused by inserted lines. Will be adjusted in visit_Generic
+ self._aspects_spec = {
+ "definitions_module": "ddtrace.appsec._iast._taint_tracking.aspects",
+ "alias_module": "ddtrace_aspects",
+ "functions": {
+ "str": "ddtrace_aspects.str_aspect",
+ "bytes": "ddtrace_aspects.bytes_aspect",
+ "bytearray": "ddtrace_aspects.bytearray_aspect",
+ "ddtrace_iast_flask_patch": "ddtrace_aspects.empty_func", # To avoid recursion
+ },
+ "stringalike_methods": {
+ "decode": "ddtrace_aspects.decode_aspect",
+ "join": "ddtrace_aspects.join_aspect",
+ "encode": "ddtrace_aspects.encode_aspect",
+ "extend": "ddtrace_aspects.bytearray_extend_aspect",
+ "upper": "ddtrace_aspects.upper_aspect",
+ "lower": "ddtrace_aspects.lower_aspect",
+ "replace": "ddtrace_aspects.replace_aspect",
+ "swapcase": "ddtrace_aspects.swapcase_aspect",
+ "title": "ddtrace_aspects.title_aspect",
+ "capitalize": "ddtrace_aspects.capitalize_aspect",
+ "casefold": "ddtrace_aspects.casefold_aspect",
+ "translate": "ddtrace_aspects.translate_aspect",
+ "format": "ddtrace_aspects.format_aspect",
+ "format_map": "ddtrace_aspects.format_map_aspect",
+ "zfill": "ddtrace_aspects.zfill_aspect",
+ "ljust": "ddtrace_aspects.ljust_aspect",
+ },
+ # Replacement function for indexes and ranges
+ "slices": {
+ "index": "ddtrace_aspects.index_aspect",
+ "slice": "ddtrace_aspects.slice_aspect",
+ },
+ # Replacement functions for modules
+ "module_functions": {
+ # "BytesIO": "ddtrace_aspects.stringio_aspect",
+ # "StringIO": "ddtrace_aspects.stringio_aspect",
+ # "format": "ddtrace_aspects.format_aspect",
+ # "format_map": "ddtrace_aspects.format_map_aspect",
+ },
+ "operators": {
+ ast.Add: "ddtrace_aspects.add_aspect",
+ "FORMAT_VALUE": "ddtrace_aspects.format_value_aspect",
+ ast.Mod: "ddtrace_aspects.modulo_aspect",
+ "BUILD_STRING": "ddtrace_aspects.build_string_aspect",
+ },
+ "excluded_from_patching": {
+ # Key: module being patched
+ # Value: dict with more info
+ "django.utils.formats": {
+ # Key: called functions that won't be patched. E.g.: for this module
+ # not a single call for format on any function will be patched.
+ #
+ # Value: function definitions. E.g.: we won't patch any Call node inside
+ # the iter_format_modules(). If we, for example, had 'foo': ('bar', 'baz')
+ # it would mean that we wouldn't patch any call to foo() done inside the
+ # bar() or baz() function definitions.
+ "format": ("",),
+ "": ("iter_format_modules",),
+ },
+ "django.utils.log": {
+ "": ("",),
+ },
+ "django.utils.html": {"": ("format_html", "format_html_join")},
+ },
+ # This is a set since all functions will be replaced by taint_sink_functions
+ "taint_sinks": {
+ "weak_randomness": DEFAULT_WEAK_RANDOMNESS_FUNCTIONS,
+ "path_traversal": DEFAULT_PATH_TRAVERSAL_FUNCTIONS,
+ "other": {
+ "load",
+ "run",
+ "path",
+ "exit",
+ "sleep",
+ "socket",
+ },
+ # These explicitly WON'T be replaced by taint_sink_function:
+ "disabled": {
+ "__new__",
+ "__init__",
+ "__dir__",
+ "__repr__",
+ "super",
+ },
+ },
+ }
+ self._sinkpoints_spec = {
+ "definitions_module": "ddtrace.appsec._iast.taint_sinks",
+ "alias_module": "ddtrace_taint_sinks",
+ "functions": {
+ "open": "ddtrace_taint_sinks.open_path_traversal",
+ },
+ }
+ self._sinkpoints_functions = self._sinkpoints_spec["functions"]
+ self.ast_modified = False
+ self.filename = filename
+ self.module_name = module_name
+
+ self._aspect_index = self._aspects_spec["slices"]["index"]
+ self._aspect_slice = self._aspects_spec["slices"]["slice"]
+ self._aspect_functions = self._aspects_spec["functions"]
+ self._aspect_operators = self._aspects_spec["operators"]
+ self._aspect_methods = self._aspects_spec["stringalike_methods"]
+ self._aspect_modules = self._aspects_spec["module_functions"]
+ self._aspect_format_value = self._aspects_spec["operators"]["FORMAT_VALUE"]
+ self._aspect_build_string = self._aspects_spec["operators"]["BUILD_STRING"]
+ self.excluded_functions = self._aspects_spec["excluded_from_patching"].get(self.module_name, {})
+
+ # Sink points
+ self._taint_sink_replace_any = self._merge_taint_sinks(
+ self._aspects_spec["taint_sinks"]["other"],
+ self._aspects_spec["taint_sinks"]["weak_randomness"],
+ *[functions for module, functions in self._aspects_spec["taint_sinks"]["path_traversal"].items()],
+ )
+ self._taint_sink_replace_disabled = self._aspects_spec["taint_sinks"]["disabled"]
+
+ self.dont_patch_these_functionsdefs = set()
+ for _, v in self.excluded_functions.items():
+ if v:
+ for i in v:
+ self.dont_patch_these_functionsdefs.add(i)
+
+ # This will be enabled when we find a module and function where we avoid doing
+ # replacements and enabled again on all the others
+ self.replacements_disabled_for_functiondef = False
+
+ self.codetype = CODE_TYPE_FIRST_PARTY
+ if "ast/tests/fixtures" in self.filename:
+ self.codetype = CODE_TYPE_FIRST_PARTY
+ elif "ddtrace" in self.filename and ("site-packages" in self.filename or "dist-packages" in self.filename):
+ self.codetype = CODE_TYPE_DD
+ elif "site-packages" in self.filename or "dist-packages" in self.filename:
+ self.codetype = CODE_TYPE_SITE_PACKAGES
+ elif "lib/python" in self.filename:
+ self.codetype = CODE_TYPE_STDLIB
+
+ @staticmethod
+ def _merge_taint_sinks(*args_functions: Set[str]) -> Set[str]:
+ merged_set = set()
+
+ for functions in args_functions:
+ merged_set.update(functions)
+
+ return merged_set
+
+ def _is_string_node(self, node): # type: (Any) -> bool
+ if PY30_37 and isinstance(node, ast.Bytes):
+ return True
+
+ if PY3 and (isinstance(node, ast.Constant) and isinstance(node.value, (str, bytes, bytearray))):
+ return True
+
+ return False
+
+ def _is_numeric_node(self, node): # type: (Any) -> bool
+ if PY30_37 and isinstance(node, ast.Num):
+ return True
+
+ if PY38_PLUS and (isinstance(node, ast.Constant) and isinstance(node.value, (int, float))):
+ return True
+
+ return False
+
+ def _is_node_constant_or_binop(self, node): # type: (Any) -> bool
+ return self._is_string_node(node) or self._is_numeric_node(node) or isinstance(node, ast.BinOp)
+
+ def _is_call_excluded(self, func_name_node): # type: (str) -> bool
+ if not self.excluded_functions:
+ return False
+ excluded_for_caller = self.excluded_functions.get(func_name_node, tuple()) + self.excluded_functions.get(
+ "", tuple()
+ )
+ return "" in excluded_for_caller or self._current_function_name in excluded_for_caller
+
+ def _is_string_format_with_literals(self, call_node):
+ # type: (ast.Call) -> bool
+ return (
+ self._is_string_node(call_node.func.value) # type: ignore[attr-defined]
+ and call_node.func.attr == "format" # type: ignore[attr-defined]
+ and all(map(self._is_node_constant_or_binop, call_node.args))
+ and all(map(lambda x: self._is_node_constant_or_binop(x.value), call_node.keywords))
+ )
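+
+    # Editor's note: this predicate lets literal-only formatting such as
+    # "{}-{}".format(1, 2) pass through unpatched, while "{}".format(user_input)
+    # (a non-constant argument) is still instrumented by visit_Call.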
+
+    def _get_function_name(self, call_node, is_function):  # type: (ast.Call, bool) -> str
+        if is_function:
+            return call_node.func.id  # type: ignore[attr-defined]
+        elif type(call_node.func) == ast.Name:
+            return call_node.func.id
+
+        # Otherwise the call is to a method
+        return call_node.func.attr  # type: ignore[attr-defined]
+
+ def _should_replace_with_taint_sink(self, call_node, is_function): # type: (ast.Call, bool) -> bool
+ function_name = self._get_function_name(call_node, is_function)
+
+ if function_name in self._taint_sink_replace_disabled:
+ return False
+
+ return any(allowed in function_name for allowed in self._taint_sink_replace_any)
+
+ def _add_original_function_as_arg(self, call_node, is_function): # type: (ast.Call, bool) -> Any
+ """
+ Creates the arguments for the original function
+ """
+ function_name = self._get_function_name(call_node, is_function)
+ function_name_arg = (
+ self._name_node(call_node, function_name, ctx=ast.Load()) if is_function else copy.copy(call_node.func)
+ )
+
+ # Arguments for stack info change from:
+ # my_function(self, *args, **kwargs)
+ # to:
+ # _add_original_function_as_arg(function_name=my_function, self, *args, **kwargs)
+ new_args = [
+ function_name_arg,
+ ] + call_node.args
+
+ return new_args
+
+ def _node(self, type_, pos_from_node, **kwargs):
+ # type: (Any, Any, Any) -> Any
+ """
+ Abstract some basic differences in node structure between versions
+ """
+
+        # Some nodes (like Module) don't have a position
+ lineno = getattr(pos_from_node, "lineno", 1)
+ col_offset = getattr(pos_from_node, "col_offset", 0)
+
+ if PY30_37:
+ # No end_lineno or end_pos_offset
+ return type_(lineno=lineno, col_offset=col_offset, **kwargs)
+
+ # Py38+
+ end_lineno = getattr(pos_from_node, "end_lineno", 1)
+ end_col_offset = getattr(pos_from_node, "end_col_offset", 0)
+
+ return type_(
+ lineno=lineno, end_lineno=end_lineno, col_offset=col_offset, end_col_offset=end_col_offset, **kwargs
+ )
+
+ def _name_node(self, from_node, _id, ctx=ast.Load()): # noqa: B008
+ # type: (Any, str, Any) -> ast.Name
+ return self._node(
+ ast.Name,
+ from_node,
+ id=_id,
+ ctx=ctx,
+ )
+
+ def _attr_node(self, from_node, attr, ctx=ast.Load()): # noqa: B008
+ # type: (Any, str, Any) -> ast.Name
+ attr_attr = ""
+ name_attr = ""
+ if attr:
+ aspect_split = attr.split(".")
+ if len(aspect_split) > 1:
+ attr_attr = aspect_split[1]
+ name_attr = aspect_split[0]
+
+ name_node = self._name_node(from_node, name_attr, ctx=ctx)
+ return self._node(ast.Attribute, from_node, attr=attr_attr, ctx=ctx, value=name_node)
+
+ def _assign_node(self, from_node, targets, value): # type: (Any, List[Any], Any) -> Any
+ return self._node(
+ ast.Assign,
+ from_node,
+ targets=targets,
+ value=value,
+ type_comment=None,
+ )
+
+ def find_insert_position(self, module_node): # type: (ast.Module) -> int
+ insert_position = 0
+ from_future_import_found = False
+ import_found = False
+
+ # Check all nodes that are "from __future__ import...", as we must insert after them.
+ #
+ # Caveat:
+ # - body_node.lineno doesn't work because a large docstring changes the lineno
+ # but not the position in the nodes (i.e. this can happen: lineno==52, position==2)
+ # TODO: Test and implement cases with docstrings before future imports, etc.
+ for body_node in module_node.body:
+ insert_position += 1
+ if isinstance(body_node, ImportFrom) and body_node.module == "__future__":
+ import_found = True
+ from_future_import_found = True
+            # As soon as we hit a non-__future__ import we can stop looking
+ elif isinstance(body_node, ImportFrom):
+ import_found = True
+ elif isinstance(body_node, Expr) and not import_found:
+ continue
+ elif from_future_import_found:
+ insert_position -= 1
+ break
+ else:
+ break
+
+ if not from_future_import_found:
+            # No __future__ import found; reset the position to 0
+ insert_position = 0
+
+ return insert_position
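+
+    # Editor's note: a worked example of the search above. For a module body of
+    # [docstring, from __future__ import annotations, import os, ...] the loop
+    # stops at "import os" and find_insert_position returns 2, i.e. new imports
+    # land right after the last __future__ import, which must stay first.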
+
+ def _none_constant(self, from_node, ctx=ast.Load()): # noqa: B008
+ # type: (Any, Any) -> Any
+ if PY30_37:
+ return ast.NameConstant(lineno=from_node.lineno, col_offset=from_node.col_offset, value=None)
+
+ # 3.8+
+ return ast.Constant(
+ lineno=from_node.lineno,
+ col_offset=from_node.col_offset,
+ end_lineno=from_node.end_lineno,
+ end_col_offset=from_node.end_col_offset,
+ value=None,
+ kind=None,
+ )
+
+ def _int_constant(self, from_node, value):
+ return ast.Constant(
+ lineno=from_node.lineno,
+ col_offset=from_node.col_offset,
+ end_lineno=getattr(from_node, "end_lineno", from_node.lineno),
+ end_col_offset=from_node.col_offset + 1,
+ value=value,
+ kind=None,
+ )
+
+ def _call_node(self, from_node, func, args): # type: (Any, Any, List[Any]) -> Any
+ return self._node(ast.Call, from_node, func=func, args=args, keywords=[])
+
+ def visit_Module(self, module_node):
+ # type: (ast.Module) -> Any
+ """
+ Insert the import statement for the replacements module
+ """
+ insert_position = self.find_insert_position(module_node)
+
+ definitions_module = self._aspects_spec["definitions_module"]
+ replacements_import = self._node(
+ ast.Import,
+ module_node,
+ names=[
+ ast.alias(
+ lineno=1,
+ col_offset=0,
+ name=definitions_module,
+ asname=self._aspects_spec["alias_module"],
+ )
+ ],
+ )
+ module_node.body.insert(insert_position, replacements_import)
+
+ definitions_module = self._sinkpoints_spec["definitions_module"]
+ replacements_import = self._node(
+ ast.Import,
+ module_node,
+ names=[
+ ast.alias(
+ lineno=1,
+ col_offset=0,
+ name=definitions_module,
+ asname=self._sinkpoints_spec["alias_module"],
+ )
+ ],
+ )
+ module_node.body.insert(insert_position, replacements_import)
+ # Must be called here instead of the start so the line offset is already
+ # processed
+ self.generic_visit(module_node)
+ return module_node
+
+ def visit_FunctionDef(self, def_node):
+ # type: (ast.FunctionDef) -> Any
+ """
+        Special case for some tests which would otherwise enter a patching
+        loop when visiting the check functions
+ """
+ self.replacements_disabled_for_functiondef = def_node.name in self.dont_patch_these_functionsdefs
+
+ if hasattr(def_node.args, "vararg") and def_node.args.vararg:
+ if def_node.args.vararg.annotation:
+ _mark_avoid_convert_recursively(def_node.args.vararg.annotation)
+
+ if hasattr(def_node.args, "kwarg") and def_node.args.kwarg:
+ if def_node.args.kwarg.annotation:
+ _mark_avoid_convert_recursively(def_node.args.kwarg.annotation)
+
+ if hasattr(def_node, "returns"):
+ _mark_avoid_convert_recursively(def_node.returns)
+
+ for i in def_node.args.args:
+ if hasattr(i, "annotation"):
+ _mark_avoid_convert_recursively(i.annotation)
+
+ if hasattr(def_node.args, "kwonlyargs"):
+ for i in def_node.args.kwonlyargs:
+ if hasattr(i, "annotation"):
+ _mark_avoid_convert_recursively(i.annotation)
+
+ if hasattr(def_node.args, "posonlyargs"):
+ for i in def_node.args.posonlyargs:
+ if hasattr(i, "annotation"):
+ _mark_avoid_convert_recursively(i.annotation)
+
+ self.generic_visit(def_node)
+ self._current_function_name = None
+
+ return def_node
+
+ def visit_Call(self, call_node): # type: (ast.Call) -> Any
+ """
+ Replace a call or method
+ """
+ self.generic_visit(call_node)
+ func_member = call_node.func
+ call_modified = False
+ if self.replacements_disabled_for_functiondef:
+ return call_node
+
+ if isinstance(func_member, ast.Name) and func_member.id:
+ # Normal function call with func=Name(...), just change the name
+ func_name_node = func_member.id
+ aspect = self._aspect_functions.get(func_name_node)
+ if aspect:
+ # Send 0 as flag_added_args value
+ call_node.args.insert(0, self._int_constant(call_node, 0))
+ # Insert original function name as first parameter
+ call_node.args = self._add_original_function_as_arg(call_node, True)
+ # Substitute function call
+ call_node.func = self._attr_node(call_node, aspect)
+ self.ast_modified = call_modified = True
+ else:
+ sink_point = self._sinkpoints_functions.get(func_name_node)
+ if sink_point:
+ call_node.func = self._attr_node(call_node, sink_point)
+ self.ast_modified = call_modified = True
+ # Call [attr] -> Attribute [value]-> Attribute [value]-> Attribute
+ # a.b.c.method()
+ # replaced_method(a.b.c)
+ elif isinstance(func_member, ast.Attribute):
+ # Method call:
+ method_name = func_member.attr
+
+ if self._is_call_excluded(method_name):
+ # Early return if method is excluded
+ return call_node
+
+ if self._is_string_format_with_literals(call_node):
+ return call_node
+
+ aspect = self._aspect_methods.get(method_name)
+
+ if aspect:
+ # Move the Attribute.value to 'args'
+ new_arg = func_member.value
+ call_node.args.insert(0, new_arg)
+ # Send 1 as flag_added_args value
+ call_node.args.insert(0, self._int_constant(call_node, 1))
+
+ # Insert None as first parameter instead of a.b.c.method
+ # to avoid unexpected side effects such as a.b.read(4).method
+ call_node.args.insert(0, self._none_constant(call_node))
+
+ # Create a new Name node for the replacement and set it as node.func
+ call_node.func = self._attr_node(call_node, aspect)
+ self.ast_modified = call_modified = True
+
+ elif hasattr(func_member.value, "id") or hasattr(func_member.value, "attr"):
+ aspect = self._aspect_modules.get(method_name, None)
+ if aspect:
+ # Send 0 as flag_added_args value
+ call_node.args.insert(0, self._int_constant(call_node, 0))
+ # Move the Function to 'args'
+ call_node.args.insert(0, call_node.func)
+
+ # Create a new Name node for the replacement and set it as node.func
+ call_node.func = self._attr_node(call_node, aspect)
+ self.ast_modified = call_modified = True
+
+ if self.codetype == CODE_TYPE_FIRST_PARTY:
+ # Function replacement case
+ if isinstance(call_node.func, ast.Name):
+ aspect = self._should_replace_with_taint_sink(call_node, True)
+ if aspect:
+ # Send 0 as flag_added_args value
+ call_node.args.insert(0, self._int_constant(call_node, 0))
+ call_node.args = self._add_original_function_as_arg(call_node, False)
+ call_node.func = self._attr_node(call_node, TAINT_SINK_FUNCTION_REPLACEMENT)
+ self.ast_modified = call_modified = True
+
+ # Method replacement case
+ elif isinstance(call_node.func, ast.Attribute):
+ aspect = self._should_replace_with_taint_sink(call_node, False)
+ if aspect:
+ # Send 0 as flag_added_args value
+ call_node.args.insert(0, self._int_constant(call_node, 0))
+ # Create a new Name node for the replacement and set it as node.func
+ call_node.args = self._add_original_function_as_arg(call_node, False)
+ call_node.func = self._attr_node(call_node, TAINT_SINK_FUNCTION_REPLACEMENT)
+ self.ast_modified = call_modified = True
+
+ if call_modified:
+ _set_metric_iast_instrumented_propagation()
+
+ return call_node
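In practice the method branch rewrites `a.b.method(x)` into `aspect(None, 1, a.b, x)`. A toy aspect illustrating that calling convention (the name `join_aspect` and its body are hypothetical stand-ins, not ddtrace's real aspects):

```python
def join_aspect(orig_callable, flag_added_args, receiver, *args):
    # orig_callable is None for method calls, to avoid re-evaluating the
    # attribute chain; flag_added_args says how many args were prepended.
    return receiver.join(*args)

# `",".join(["a", "b"])` would be rewritten to roughly:
result = join_aspect(None, 1, ",", ["a", "b"])
assert result == "a,b"
```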
+
+ def visit_BinOp(self, call_node): # type: (ast.BinOp) -> Any
+ """
+ Replace a binary operator
+ """
+ self.generic_visit(call_node)
+ operator = call_node.op
+
+ aspect = self._aspect_operators.get(operator.__class__)
+ if aspect:
+ self.ast_modified = True
+ _set_metric_iast_instrumented_propagation()
+
+ return ast.Call(self._attr_node(call_node, aspect), [call_node.left, call_node.right], [])
+
+ return call_node
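A self-contained sketch of the same BinOp rewrite for `+` only, assuming nothing beyond the stdlib (the real transformer dispatches through `self._aspect_operators` and its own aspect names):

```python
import ast

class AddRewriter(ast.NodeTransformer):
    def visit_BinOp(self, node):
        self.generic_visit(node)
        if isinstance(node.op, ast.Add):
            # a + b  ->  add_aspect(a, b)
            return ast.Call(
                func=ast.Name(id="add_aspect", ctx=ast.Load()),
                args=[node.left, node.right],
                keywords=[],
            )
        return node

tree = ast.fix_missing_locations(AddRewriter().visit(ast.parse("c = a + b")))
print(ast.unparse(tree))  # prints: c = add_aspect(a, b)  (ast.unparse needs 3.9+)
```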
+
+ def visit_FormattedValue(self, fmt_value_node): # type: (ast.FormattedValue) -> Any
+ """
+        Visit a FormattedValue node; these are the constituent atoms of the
+        JoinedStr nodes used to implement f-strings.
+ """
+
+ self.generic_visit(fmt_value_node)
+
+ if hasattr(fmt_value_node, "value") and self._is_node_constant_or_binop(fmt_value_node.value):
+ return fmt_value_node
+
+ func_name_node = self._attr_node(fmt_value_node, self._aspect_format_value)
+
+ options_int = self._node(
+ ast.Constant,
+ fmt_value_node,
+ value=fmt_value_node.conversion,
+ kind=None,
+ )
+
+ format_spec = fmt_value_node.format_spec if fmt_value_node.format_spec else self._none_constant(fmt_value_node)
+ call_node = self._call_node(
+ fmt_value_node,
+ func=func_name_node,
+ args=[fmt_value_node.value, options_int, format_spec],
+ )
+
+ self.ast_modified = True
+ _set_metric_iast_instrumented_propagation()
+ return call_node
+
+ def visit_JoinedStr(self, joinedstr_node): # type: (ast.JoinedStr) -> Any
+ """
+        Replace the JoinedStr AST node with a Call to the replacement function. Most of
+        the work inside f-strings is done by visit_FormattedValue above.
+ """
+ self.generic_visit(joinedstr_node)
+
+ if all(
+ map(
+ lambda x: isinstance(x, ast.FormattedValue) or self._is_node_constant_or_binop(x),
+ joinedstr_node.values,
+ )
+ ):
+ return joinedstr_node
+
+ func_name_node = self._attr_node(
+ joinedstr_node,
+ self._aspect_build_string,
+ ctx=ast.Load(),
+ )
+ call_node = self._call_node(
+ joinedstr_node,
+ func=func_name_node,
+ args=joinedstr_node.values,
+ )
+
+ self.ast_modified = True
+ _set_metric_iast_instrumented_propagation()
+ return call_node
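For reference, this is roughly the node structure the two visitors above receive for a small f-string (the dump format varies slightly between Python versions):

```python
import ast

tree = ast.parse('f"{user!r:>10} rows"', mode="eval")
print(ast.dump(tree.body, indent=2))  # indent= needs Python 3.9+
# JoinedStr(values=[
#   FormattedValue(value=Name(id='user'),
#                  conversion=114,  # 114 == ord('r'), from the !r conversion
#                  format_spec=JoinedStr(values=[Constant(value='>10')])),
#   Constant(value=' rows')])
```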
+
+ def visit_AugAssign(self, augassign_node): # type: (ast.AugAssign) -> Any
+ """Replace an inplace add or multiply."""
+ if isinstance(augassign_node.target, ast.Subscript):
+ # Can't augassign to function call, ignore this node
+ augassign_node.target.avoid_convert = True # type: ignore[attr-defined]
+ self.generic_visit(augassign_node)
+ return augassign_node
+
+ # TODO: Replace an inplace add or multiply (+= / *=)
+ return augassign_node
+
+ def visit_Assign(self, assign_node): # type: (ast.Assign) -> Any
+ """
+ Add the ignore marks for left-side subscripts or list/tuples to avoid problems
+ later with the visit_Subscript node.
+ """
+ if isinstance(assign_node.value, ast.Subscript):
+ if hasattr(assign_node.value, "value") and hasattr(assign_node.value.value, "id"):
+ # Best effort to avoid converting type definitions
+ if assign_node.value.value.id in (
+ "Callable",
+ "Dict",
+ "Generator",
+ "List",
+ "Optional",
+ "Sequence",
+ "Tuple",
+ "Type",
+ "TypeVar",
+ "Union",
+ ):
+ _mark_avoid_convert_recursively(assign_node.value)
+
+ for target in assign_node.targets:
+ if isinstance(target, ast.Subscript):
+                # We can't assign to a function call, and converting would rewrite
+                # the index destination anyway, so we just skip this target
+ target.avoid_convert = True # type: ignore[attr-defined]
+ elif isinstance(target, (List, ast.Tuple)):
+ # Same for lists/tuples on the left side of the assignment
+ for element in target.elts:
+ if isinstance(element, ast.Subscript):
+ element.avoid_convert = True # type: ignore[attr-defined]
+
+ # Create a normal assignment. This way we decompose multiple assignments
+ self.generic_visit(assign_node)
+ return assign_node
+
+ def visit_Delete(self, assign_node): # type: (ast.Delete) -> Any
+        # `del replaced_index(foo, bar)` would fail, so avoid converting the subscript
+        # targets since they are going to be deleted anyway
+
+ for target in assign_node.targets:
+ if isinstance(target, ast.Subscript):
+ target.avoid_convert = True # type: ignore[attr-defined]
+
+ self.generic_visit(assign_node)
+ return assign_node
+
+ def visit_AnnAssign(self, node): # type: (ast.AnnAssign) -> Any
+ # AnnAssign is a type annotation, we don't need to convert it
+ # and we avoid converting any subscript inside it.
+ _mark_avoid_convert_recursively(node)
+ self.generic_visit(node)
+ return node
+
+ def visit_ClassDef(self, node): # type: (ast.ClassDef) -> Any
+ for i in node.bases:
+ _mark_avoid_convert_recursively(i)
+
+ self.generic_visit(node)
+ return node
+
+ def visit_Subscript(self, subscr_node): # type: (ast.Subscript) -> Any
+ """
+        Turn indexes (a[1]) and slices (a[0:1:2]) into the replacement function call.
+        Optimization: don't convert if the indexes are string literals
+ """
+ self.generic_visit(subscr_node)
+
+        # We mark nodes with avoid_convert (see visit_Delete, visit_AugAssign, visit_Assign) for complex
+        # expressions that raise errors when we try to replace them with index aspects
+ if hasattr(subscr_node, "avoid_convert"):
+ return subscr_node
+
+ # Optimization: String literal slices and indexes are not patched
+ if self._is_string_node(subscr_node.value):
+ return subscr_node
+
+        # Empty-name placeholder; the real aspect name is filled in per branch below
+        attr_node = self._attr_node(subscr_node, "")
+
+ call_node = self._call_node(
+ subscr_node,
+ func=attr_node,
+ args=[],
+ )
+ if isinstance(subscr_node.slice, ast.Slice):
+ # Slice[0:1:2]. The other cases in this if are Indexes[0]
+ aspect_split = self._aspect_slice.split(".")
+ call_node.func.attr = aspect_split[1]
+ call_node.func.value.id = aspect_split[0]
+ none_node = self._none_constant(subscr_node)
+ lower = none_node if subscr_node.slice.lower is None else subscr_node.slice.lower
+ upper = none_node if subscr_node.slice.upper is None else subscr_node.slice.upper
+ step = none_node if subscr_node.slice.step is None else subscr_node.slice.step
+ call_node.args.extend([subscr_node.value, lower, upper, step])
+ self.ast_modified = True
+ elif PY39_PLUS:
+ if self._is_string_node(subscr_node.slice):
+ return subscr_node
+            # In Py39+, when subscr_node.slice is not a Slice, it is directly the unwrapped value
+            # of the index (e.g. Constant for a number, Name for a var, etc.)
+ aspect_split = self._aspect_index.split(".")
+ call_node.func.attr = aspect_split[1]
+ call_node.func.value.id = aspect_split[0]
+ call_node.args.extend([subscr_node.value, subscr_node.slice])
+ # TODO: python 3.8 isn't working correctly with index_aspect, tests raise:
+ # corrupted size vs. prev_size in fastbins
+ # Test failed with exit code -6
+ # https://app.circleci.com/pipelines/github/DataDog/dd-trace-py/46665/workflows/3cf1257c-feaf-4653-bb9c-fb840baa1776/jobs/3031799
+ # elif isinstance(subscr_node.slice, ast.Index):
+ # if self._is_string_node(subscr_node.slice.value): # type: ignore[attr-defined]
+ # return subscr_node
+ # aspect_split = self._aspect_index.split(".")
+ # call_node.func.attr = aspect_split[1]
+ # call_node.func.value.id = aspect_split[0]
+ # call_node.args.extend([subscr_node.value, subscr_node.slice.value]) # type: ignore[attr-defined]
+ else:
+ return subscr_node
+
+ self.ast_modified = True
+ return call_node
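The replacement functions that visit_Subscript wires in must be observably equivalent to the original subscript. A sketch of that contract with illustrative names (`index_aspect` and `slice_aspect` are stand-ins, not the real aspect identifiers):

```python
def index_aspect(candidate, index):
    return candidate[index]

def slice_aspect(candidate, lower, upper, step):
    # Missing slice parts arrive as explicit None, matching the
    # lower/upper/step handling in visit_Subscript above.
    return candidate[lower:upper:step]

data = [0, 1, 2, 3, 4]
assert index_aspect(data, 2) == data[2]
assert slice_aspect(data, None, 3, None) == data[:3]
```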
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_input_info.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_input_info.py
new file mode 100644
index 0000000..64a11c8
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_input_info.py
@@ -0,0 +1,13 @@
+class Input_info(object):
+ __slots__ = ["name", "value", "origin"]
+
+ def __init__(self, name, value, origin):
+ self.name = name
+ self.value = value
+ self.origin = origin
+
+ def __eq__(self, other):
+ return self.name == other.name and self.value == other.value and self.origin == other.origin
+
+ def __repr__(self):
+ return "input_info(%s, %s, %s)" % (str(self.name), str(self.value), str(self.origin))
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_loader.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_loader.py
new file mode 100644
index 0000000..24da1ee
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_loader.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python3
+
+from ddtrace.internal.logger import get_logger
+
+from ._ast.ast_patching import astpatch_module
+from ._utils import _is_iast_enabled
+
+
+log = get_logger(__name__)
+
+
+IS_IAST_ENABLED = _is_iast_enabled()
+
+
+def _exec_iast_patched_module(module_watchdog, module):
+ patched_source = None
+ if IS_IAST_ENABLED:
+ try:
+ module_path, patched_source = astpatch_module(module)
+ except Exception:
+ log.debug("Unexpected exception while AST patching", exc_info=True)
+ patched_source = None
+
+ if patched_source:
+ # Patched source is executed instead of original module
+ compiled_code = compile(patched_source, module_path, "exec")
+ exec(compiled_code, module.__dict__) # nosec B102
+ elif module_watchdog.loader is not None:
+ module_watchdog.loader.exec_module(module)
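The compile-and-exec step in isolation, as a minimal sketch (a plain `types.ModuleType` stands in for the module object the watchdog hands over):

```python
import types

# Stands in for the source string returned by astpatch_module
patched_source = "GREETING = 'patched ' + 'module'"
module = types.ModuleType("demo")

compiled_code = compile(patched_source, "<demo>", "exec")
exec(compiled_code, module.__dict__)

assert module.GREETING == "patched module"
```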
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_metrics.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_metrics.py
new file mode 100644
index 0000000..b1fad73
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_metrics.py
@@ -0,0 +1,163 @@
+import os
+import sys
+import traceback
+from typing import Dict
+
+from ddtrace.appsec._constants import IAST
+from ddtrace.appsec._constants import IAST_SPAN_TAGS
+from ddtrace.appsec._deduplications import deduplication
+from ddtrace.internal import telemetry
+from ddtrace.internal.logger import get_logger
+from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_IAST
+
+
+log = get_logger(__name__)
+
+TELEMETRY_OFF_NAME = "OFF"
+TELEMETRY_DEBUG_NAME = "DEBUG"
+TELEMETRY_MANDATORY_NAME = "MANDATORY"
+TELEMETRY_INFORMATION_NAME = "INFORMATION"
+
+TELEMETRY_DEBUG_VERBOSITY = 10
+TELEMETRY_INFORMATION_VERBOSITY = 20
+TELEMETRY_MANDATORY_VERBOSITY = 30
+TELEMETRY_OFF_VERBOSITY = 40
+
+METRICS_REPORT_LVLS = (
+ (TELEMETRY_DEBUG_VERBOSITY, TELEMETRY_DEBUG_NAME),
+ (TELEMETRY_INFORMATION_VERBOSITY, TELEMETRY_INFORMATION_NAME),
+ (TELEMETRY_MANDATORY_VERBOSITY, TELEMETRY_MANDATORY_NAME),
+ (TELEMETRY_OFF_VERBOSITY, TELEMETRY_OFF_NAME),
+)
+
+_IAST_SPAN_METRICS: Dict[str, int] = {}
+
+
+def get_iast_metrics_report_lvl(*args, **kwargs):
+ report_lvl_name = os.environ.get(IAST.TELEMETRY_REPORT_LVL, TELEMETRY_INFORMATION_NAME).upper()
+    for lvl, lvl_name in METRICS_REPORT_LVLS:
+        if report_lvl_name == lvl_name:
+            return lvl
+    # Unknown level names fall back to the INFORMATION default
+    return TELEMETRY_INFORMATION_VERBOSITY
+
+
+def metric_verbosity(lvl):
+    def wrapper(f):
+        if lvl >= get_iast_metrics_report_lvl():
+            return f
+        # No-op that accepts any signature, so filtered metric helpers can
+        # still be called with their usual arguments
+        return lambda *args, **kwargs: None  # noqa: E731
+
+    return wrapper
+
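The gating effect of the decorator, demonstrated with hard-coded levels (a sketch; the real threshold comes from the environment via get_iast_metrics_report_lvl):

```python
def metric_verbosity_demo(lvl, configured):
    def wrapper(f):
        if lvl >= configured:
            return f
        return lambda *args, **kwargs: None  # metric is filtered out

    return wrapper

@metric_verbosity_demo(lvl=10, configured=20)  # DEBUG metric, INFORMATION threshold
def debug_metric():
    return "sent"

assert debug_metric() is None  # below the threshold: nothing is reported
```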
+
+@metric_verbosity(TELEMETRY_MANDATORY_VERBOSITY)
+@deduplication
+def _set_iast_error_metric(msg):
+ # type: (str) -> None
+    # format_exc and format_exception return the error and the last frame
+ try:
+ exception_type, exception_instance, _traceback_list = sys.exc_info()
+ res = []
+ # first 3 frames are this function, the exception in aspects and the error line
+ res.extend(traceback.format_stack(limit=10)[:-3])
+
+ # get the frame with the error and the error message
+ result = traceback.format_exception(exception_type, exception_instance, _traceback_list)
+ res.extend(result[1:])
+
+ stack_trace = "".join(res)
+ tags = {
+ "lib_language": "python",
+ }
+ telemetry.telemetry_writer.add_log("ERROR", msg, stack_trace=stack_trace, tags=tags)
+ except Exception:
+ log.warning("Error reporting ASM WAF logs metrics", exc_info=True)
+
+
+@metric_verbosity(TELEMETRY_MANDATORY_VERBOSITY)
+def _set_metric_iast_instrumented_source(source_type):
+ from ._taint_tracking._native.taint_tracking import origin_to_str # noqa: F401
+
+ telemetry.telemetry_writer.add_count_metric(
+ TELEMETRY_NAMESPACE_TAG_IAST, "instrumented.source", 1, (("source_type", origin_to_str(source_type)),)
+ )
+
+
+@metric_verbosity(TELEMETRY_MANDATORY_VERBOSITY)
+def _set_metric_iast_instrumented_propagation():
+ telemetry.telemetry_writer.add_count_metric(TELEMETRY_NAMESPACE_TAG_IAST, "instrumented.propagation", 1)
+
+
+@metric_verbosity(TELEMETRY_MANDATORY_VERBOSITY)
+def _set_metric_iast_instrumented_sink(vulnerability_type, counter=1):
+ telemetry.telemetry_writer.add_count_metric(
+ TELEMETRY_NAMESPACE_TAG_IAST, "instrumented.sink", counter, (("vulnerability_type", vulnerability_type),)
+ )
+
+
+@metric_verbosity(TELEMETRY_INFORMATION_VERBOSITY)
+def _set_metric_iast_executed_source(source_type):
+ from ._taint_tracking._native.taint_tracking import origin_to_str # noqa: F401
+
+ telemetry.telemetry_writer.add_count_metric(
+ TELEMETRY_NAMESPACE_TAG_IAST, "executed.source", 1, (("source_type", origin_to_str(source_type)),)
+ )
+
+
+@metric_verbosity(TELEMETRY_INFORMATION_VERBOSITY)
+def _set_metric_iast_executed_sink(vulnerability_type):
+ telemetry.telemetry_writer.add_count_metric(
+ TELEMETRY_NAMESPACE_TAG_IAST, "executed.sink", 1, (("vulnerability_type", vulnerability_type),)
+ )
+
+
+def _request_tainted():
+ from ._taint_tracking import num_objects_tainted
+
+ return num_objects_tainted()
+
+
+@metric_verbosity(TELEMETRY_INFORMATION_VERBOSITY)
+def _set_metric_iast_request_tainted():
+ total_objects_tainted = _request_tainted()
+ if total_objects_tainted > 0:
+ telemetry.telemetry_writer.add_count_metric(
+ TELEMETRY_NAMESPACE_TAG_IAST, "request.tainted", total_objects_tainted
+ )
+
+
+def _set_span_tag_iast_request_tainted(span):
+ total_objects_tainted = _request_tainted()
+
+ if total_objects_tainted > 0:
+ span.set_tag(IAST_SPAN_TAGS.TELEMETRY_REQUEST_TAINTED, total_objects_tainted)
+
+
+def _set_span_tag_iast_executed_sink(span):
+ data = get_iast_span_metrics()
+
+ if data is not None:
+ for key, value in data.items():
+ if key.startswith(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK):
+ span.set_tag(key, value)
+
+ reset_iast_span_metrics()
+
+
+def increment_iast_span_metric(prefix: str, metric_key: str, counter: int = 1) -> None:
+ data = get_iast_span_metrics()
+ full_key = prefix + "." + metric_key.lower()
+ result = data.get(full_key, 0)
+ data[full_key] = result + counter
+
+
+def get_iast_span_metrics() -> Dict:
+ return _IAST_SPAN_METRICS
+
+
+def reset_iast_span_metrics() -> None:
+ _IAST_SPAN_METRICS.clear()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_overhead_control_engine.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_overhead_control_engine.py
new file mode 100644
index 0000000..7862375
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_overhead_control_engine.py
@@ -0,0 +1,133 @@
+"""
+The Overhead Control Engine (OCE) is a component that, by design, keeps the overhead below a maximum
+limit. It measures the operations executed in a request and deactivates detection
+(and therefore reduces the overhead to nearly 0) once a certain threshold is reached.
+"""
+import os
+import threading
+from typing import TYPE_CHECKING # noqa:F401
+
+from ddtrace.internal.logger import get_logger
+from ddtrace.sampler import RateSampler
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from typing import Set # noqa:F401
+ from typing import Tuple # noqa:F401
+ from typing import Type # noqa:F401
+
+ from ddtrace.span import Span # noqa:F401
+
+log = get_logger(__name__)
+
+
+def get_request_sampling_value(): # type: () -> float
+ # Percentage of requests analyzed by IAST (default: 30%)
+ return float(os.environ.get("DD_IAST_REQUEST_SAMPLING", 30.0))
+
+
+MAX_REQUESTS = int(os.environ.get("DD_IAST_MAX_CONCURRENT_REQUESTS", 2))
+MAX_VULNERABILITIES_PER_REQUEST = int(os.environ.get("DD_IAST_VULNERABILITIES_PER_REQUEST", 2))
+
+
+class Operation(object):
+    """Common operations related to the Overhead Control Engine (OCE). Every vulnerability/taint_sink should inherit
+    from this class. The OCE instance calls these methods to control the overhead produced in each request.
+ """
+
+ _lock = threading.Lock()
+ _vulnerability_quota = MAX_VULNERABILITIES_PER_REQUEST
+ _reported_vulnerabilities = set() # type: Set[Tuple[str, int]]
+
+ @classmethod
+ def reset(cls):
+ cls._vulnerability_quota = MAX_VULNERABILITIES_PER_REQUEST
+ cls._reported_vulnerabilities = set()
+
+ @classmethod
+ def acquire_quota(cls):
+ # type: () -> bool
+ cls._lock.acquire()
+ result = False
+ if cls._vulnerability_quota > 0:
+ cls._vulnerability_quota -= 1
+ result = True
+ cls._lock.release()
+ return result
+
+ @classmethod
+ def increment_quota(cls):
+ # type: () -> bool
+ cls._lock.acquire()
+ result = False
+ if cls._vulnerability_quota < MAX_VULNERABILITIES_PER_REQUEST:
+ cls._vulnerability_quota += 1
+ result = True
+ cls._lock.release()
+ return result
+
+ @classmethod
+ def has_quota(cls):
+ # type: () -> bool
+ cls._lock.acquire()
+ result = cls._vulnerability_quota > 0
+ cls._lock.release()
+ return result
+
+ @classmethod
+ def is_not_reported(cls, filename, lineno):
+ # type: (str, int) -> bool
+ vulnerability_id = (filename, lineno)
+ if vulnerability_id in cls._reported_vulnerabilities:
+ return False
+
+ cls._reported_vulnerabilities.add(vulnerability_id)
+ return True
+
+
+class OverheadControl(object):
+ """This class is meant to control the overhead introduced by IAST analysis.
+ The goal is to do sampling at different levels of the IAST analysis (per process, per request, etc)
+ """
+
+ _lock = threading.Lock()
+ _request_quota = MAX_REQUESTS
+ _vulnerabilities = set() # type: Set[Type[Operation]]
+ _sampler = RateSampler(sample_rate=get_request_sampling_value() / 100.0)
+
+ def reconfigure(self):
+ self._sampler = RateSampler(sample_rate=get_request_sampling_value() / 100.0)
+
+ def acquire_request(self, span):
+ # type: (Span) -> bool
+        """Decide whether IAST analysis will be done for this request.
+        - Take one unit of the request quota at the start of the request to limit the number of requests analyzed simultaneously.
+        - Use the sample rate to analyze only a percentage of the total requests (30% by default).
+ """
+ if self._request_quota <= 0 or not self._sampler.sample(span):
+ return False
+
+ with self._lock:
+ if self._request_quota <= 0:
+ return False
+
+ self._request_quota -= 1
+
+ return True
+
+ def release_request(self):
+        """Increment the request quota at the end of the request."""
+ with self._lock:
+ self._request_quota += 1
+ self.vulnerabilities_reset_quota()
+
+ def register(self, klass):
+ # type: (Type[Operation]) -> Type[Operation]
+        """Register vulnerabilities/taint_sinks. This set of elements is reset for each request."""
+ self._vulnerabilities.add(klass)
+ return klass
+
+ def vulnerabilities_reset_quota(self):
+ # type: () -> None
+ for k in self._vulnerabilities:
+ k.reset()
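A condensed sketch of the request-quota half of this engine (sampling omitted): at most N requests hold the quota at once, and anything over the limit skips analysis.

```python
import threading

class RequestQuota:
    def __init__(self, max_requests=2):
        self._lock = threading.Lock()
        self._quota = max_requests

    def acquire(self):
        with self._lock:
            if self._quota <= 0:
                return False  # over the limit: skip IAST analysis
            self._quota -= 1
            return True

    def release(self):
        with self._lock:
            self._quota += 1

quota = RequestQuota(max_requests=1)
assert quota.acquire() is True
assert quota.acquire() is False  # second concurrent request is not analyzed
quota.release()
```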
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patch.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patch.py
new file mode 100644
index 0000000..d6d8fef
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patch.py
@@ -0,0 +1,177 @@
+import ctypes
+import gc
+import sys
+from typing import TYPE_CHECKING # noqa:F401
+
+from ddtrace.internal.logger import get_logger
+from ddtrace.vendor.wrapt import FunctionWrapper
+from ddtrace.vendor.wrapt import resolve_path
+
+from ._utils import _is_iast_enabled
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from typing import Any # noqa:F401
+ from typing import Callable # noqa:F401
+ from typing import Dict # noqa:F401
+ from typing import Optional # noqa:F401
+
+
+_DD_ORIGINAL_ATTRIBUTES = {} # type: Dict[Any, Any]
+
+log = get_logger(__name__)
+
+
+def set_and_check_module_is_patched(module_str, default_attr="_datadog_patch"):
+ # type: (str, str) -> Optional[bool]
+ try:
+ __import__(module_str)
+ module = sys.modules[module_str]
+ if getattr(module, default_attr, False):
+ return False
+ setattr(module, default_attr, True)
+ except ImportError:
+ pass
+ return True
+
+
+def set_module_unpatched(module_str, default_attr="_datadog_patch"):
+ # type: (str, str) -> None
+ try:
+ __import__(module_str)
+ module = sys.modules[module_str]
+ setattr(module, default_attr, False)
+ except ImportError:
+ pass
+
+
+def try_wrap_function_wrapper(module, name, wrapper):
+ # type: (str, str, Callable) -> None
+ try:
+ wrap_object(module, name, FunctionWrapper, (wrapper,))
+ except (ImportError, AttributeError):
+        log.debug("IAST patching. Module %s.%s does not exist", module, name)
+
+
+def try_unwrap(module, name):
+ try:
+ (parent, attribute, _) = resolve_path(module, name)
+ if (parent, attribute) in _DD_ORIGINAL_ATTRIBUTES:
+ original = _DD_ORIGINAL_ATTRIBUTES[(parent, attribute)]
+ apply_patch(parent, attribute, original)
+ del _DD_ORIGINAL_ATTRIBUTES[(parent, attribute)]
+ except ModuleNotFoundError:
+ pass
+
+
+def apply_patch(parent, attribute, replacement):
+ try:
+ current_attribute = getattr(parent, attribute)
+ # Avoid overwriting the original function if we call this twice
+ if not isinstance(current_attribute, FunctionWrapper):
+ _DD_ORIGINAL_ATTRIBUTES[(parent, attribute)] = current_attribute
+ setattr(parent, attribute, replacement)
+ except (TypeError, AttributeError):
+ patch_builtins(parent, attribute, replacement)
+
+
+def wrap_object(module, name, factory, args=(), kwargs=None):
+ if kwargs is None:
+ kwargs = {}
+ (parent, attribute, original) = resolve_path(module, name)
+ wrapper = factory(original, *args, **kwargs)
+ apply_patch(parent, attribute, wrapper)
+ return wrapper
+
+
+def patchable_builtin(klass):
+ refs = gc.get_referents(klass.__dict__)
+ return refs[0]
+
+
+def patch_builtins(klass, attr, value):
+ """Based on forbiddenfruit package:
+ https://github.com/clarete/forbiddenfruit/blob/master/forbiddenfruit/__init__.py#L421
+ ---
+ Patch a built-in `klass` with `attr` set to `value`
+
+    This function monkey-patches the built-in python object `klass`, adding the
+    new attribute `attr` to it. You can attach any kind of attribute to the class.
+
+ It's possible to attach methods as class methods, just do the following:
+
+ >>> def myclassmethod(cls):
+ ... return cls(1.5)
+ >>> curse(float, "myclassmethod", classmethod(myclassmethod))
+ >>> float.myclassmethod()
+ 1.5
+
+ Methods will be automatically bound, so don't forget to add a self
+ parameter to them, like this:
+
+ >>> def hello(self):
+ ... return self * 2
+ >>> curse(str, "hello", hello)
+ >>> "yo".hello()
+ "yoyo"
+ """
+ dikt = patchable_builtin(klass)
+
+ old_value = dikt.get(attr, None)
+ old_name = "_c_%s" % attr # do not use .format here, it breaks py2.{5,6}
+
+ # Patch the thing
+ dikt[attr] = value
+
+ if old_value:
+ dikt[old_name] = old_value
+
+ try:
+ dikt[attr].__name__ = old_value.__name__
+ except (AttributeError, TypeError): # py2.5 will raise `TypeError`
+ pass
+ try:
+ dikt[attr].__qualname__ = old_value.__qualname__
+ except AttributeError:
+ pass
+
+ ctypes.pythonapi.PyType_Modified(ctypes.py_object(klass))
+
+
+def if_iast_taint_returned_object_for(origin, wrapped, instance, args, kwargs):
+ value = wrapped(*args, **kwargs)
+
+ if _is_iast_enabled():
+ try:
+ from ._taint_tracking import is_pyobject_tainted
+ from ._taint_tracking import taint_pyobject
+ from .processor import AppSecIastSpanProcessor
+
+ if not AppSecIastSpanProcessor.is_span_analyzed():
+ return value
+
+ if not is_pyobject_tainted(value):
+ name = str(args[0]) if len(args) else "http.request.body"
+ return taint_pyobject(pyobject=value, source_name=name, source_value=value, source_origin=origin)
+ except Exception:
+ log.debug("Unexpected exception while tainting pyobject", exc_info=True)
+ return value
+
+
+def if_iast_taint_yield_tuple_for(origins, wrapped, instance, args, kwargs):
+ if _is_iast_enabled():
+ from ._taint_tracking import taint_pyobject
+ from .processor import AppSecIastSpanProcessor
+
+        if not AppSecIastSpanProcessor.is_span_analyzed():
+            for key, value in wrapped(*args, **kwargs):
+                yield key, value
+            # Stop here so the items are not yielded a second time below
+            return
+
+ for key, value in wrapped(*args, **kwargs):
+ new_key = taint_pyobject(pyobject=key, source_name=key, source_value=key, source_origin=origins[0])
+ new_value = taint_pyobject(pyobject=value, source_name=key, source_value=value, source_origin=origins[1])
+ yield new_key, new_value
+
+ else:
+ for key, value in wrapped(*args, **kwargs):
+ yield key, value
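All of these helpers rely on wrapt's `(wrapped, instance, args, kwargs)` wrapper convention. A minimal sketch using the standalone `wrapt` package as a stand-in for the vendored copy:

```python
import wrapt

def taint_result_wrapper(wrapped, instance, args, kwargs):
    # Call the original, then post-process its return value
    result = wrapped(*args, **kwargs)
    print("wrapped call returned:", result)
    return result

def parse(text):
    return text.strip()

parse = wrapt.FunctionWrapper(parse, taint_result_wrapper)
assert parse("  hello  ") == "hello"  # prints: wrapped call returned: hello
```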
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patch_modules.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patch_modules.py
new file mode 100644
index 0000000..05a6900
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patch_modules.py
@@ -0,0 +1,27 @@
+from ddtrace.vendor.wrapt.importer import when_imported
+
+
+IAST_PATCH = {
+ "command_injection": True,
+ "path_traversal": True,
+ "weak_cipher": True,
+ "weak_hash": True,
+}
+
+
+def patch_iast(patch_modules=IAST_PATCH):
+    """Load IAST vulnerability sink points.
+
+    IAST_PATCH: maps each implemented vulnerability sink to whether it is enabled
+ """
+ # TODO: Devise the correct patching strategy for IAST
+ from ddtrace._monkey import _on_import_factory
+
+ for module in (m for m, e in patch_modules.items() if e):
+ when_imported("hashlib")(
+ _on_import_factory(module, prefix="ddtrace.appsec._iast.taint_sinks", raise_errors=False)
+ )
+
+ when_imported("json")(
+ _on_import_factory("json_tainting", prefix="ddtrace.appsec._iast._patches", raise_errors=False)
+ )
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patches/json_tainting.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patches/json_tainting.py
new file mode 100644
index 0000000..0984b7a
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_patches/json_tainting.py
@@ -0,0 +1,82 @@
+from ddtrace.internal.logger import get_logger
+from ddtrace.settings.asm import config as asm_config
+
+from .._patch import set_and_check_module_is_patched
+from .._patch import set_module_unpatched
+from .._patch import try_unwrap
+from .._patch import try_wrap_function_wrapper
+
+
+log = get_logger(__name__)
+
+
+_DEFAULT_ATTR = "_datadog_json_tainting_patch"
+
+
+def get_version():
+ # type: () -> str
+ return ""
+
+
+def unpatch_iast():
+ # type: () -> None
+ set_module_unpatched("json", default_attr=_DEFAULT_ATTR)
+ try_unwrap("json", "loads")
+ if asm_config._iast_lazy_taint:
+ try_unwrap("json.encoder", "JSONEncoder.default")
+ try_unwrap("simplejson.encoder", "JSONEncoder.default")
+
+
+def patch():
+ # type: () -> None
+    """Wrap json functions so that parsed objects inherit the taint of their input."""
+ if not set_and_check_module_is_patched("json", default_attr=_DEFAULT_ATTR):
+ return
+ try_wrap_function_wrapper("json", "loads", wrapped_loads)
+ if asm_config._iast_lazy_taint:
+ try_wrap_function_wrapper("json.encoder", "JSONEncoder.default", patched_json_encoder_default)
+ try_wrap_function_wrapper("simplejson.encoder", "JSONEncoder.default", patched_json_encoder_default)
+
+
+def wrapped_loads(wrapped, instance, args, kwargs):
+ from .._taint_utils import taint_structure
+
+ obj = wrapped(*args, **kwargs)
+ if asm_config._iast_enabled:
+ try:
+ from .._taint_tracking import get_tainted_ranges
+ from .._taint_tracking import is_pyobject_tainted
+ from .._taint_tracking import taint_pyobject
+ from ..processor import AppSecIastSpanProcessor
+
+ if not AppSecIastSpanProcessor.is_span_analyzed():
+ return obj
+
+ if is_pyobject_tainted(args[0]) and obj:
+ # tainting object
+ ranges = get_tainted_ranges(args[0])
+ if not ranges:
+ return obj
+ # take the first source as main source
+ source = ranges[0].source
+                if isinstance(obj, (dict, list)):
+                    obj = taint_structure(obj, source.origin, source.origin)
+                elif isinstance(obj, (str, bytes, bytearray)):
+                    obj = taint_pyobject(obj, source.name, source.value, source.origin)
+ except Exception:
+            log.debug("Unexpected exception while tainting the json object", exc_info=True)
+ raise
+ return obj
+
+
+def patched_json_encoder_default(original_func, instance, args, kwargs):
+ from .._taint_utils import LazyTaintDict
+ from .._taint_utils import LazyTaintList
+
+ if isinstance(args[0], (LazyTaintList, LazyTaintDict)):
+ return args[0]._obj
+
+ return original_func(*args, **kwargs)
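A toy illustration of the propagation idea (no real taint engine here: an id-based set stands in for the native taint map): if the raw JSON string is tainted, the decoded string values become tainted too.

```python
import json

TAINTED = set()  # ids of tainted objects; a toy stand-in for the taint map

def taint(obj):
    TAINTED.add(id(obj))
    return obj

def tainted_loads(raw):
    obj = json.loads(raw)
    if id(raw) in TAINTED and isinstance(obj, dict):
        # Mirror taint_structure: propagate taint to the decoded string values
        return {k: taint(v) if isinstance(v, str) else v for k, v in obj.items()}
    return obj

raw = taint('{"user": "alice"}')
decoded = tainted_loads(raw)
assert id(decoded["user"]) in TAINTED
```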
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_stacktrace.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_stacktrace.cpython-311-x86_64-linux-gnu.so
new file mode 100755
index 0000000..28a86bf
Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_stacktrace.cpython-311-x86_64-linux-gnu.so differ
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_dict.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_dict.py
new file mode 100644
index 0000000..97df240
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_dict.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python3
+#
+from typing import TYPE_CHECKING # noqa:F401
+
+
+if TYPE_CHECKING:
+ from typing import Dict # noqa:F401
+ from typing import Tuple # noqa:F401
+
+ from ._taint_tracking import Source # noqa:F401
+
+_IAST_TAINT_DICT = {} # type: Dict[int, Tuple[Tuple[Source, int, int],...]]
+
+
+def get_taint_dict(): # type: () -> Dict[int, Tuple[Tuple[Source, int, int],...]]
+ return _IAST_TAINT_DICT
+
+
+def clear_taint_mapping(): # type: () -> None
+ global _IAST_TAINT_DICT
+ _IAST_TAINT_DICT = {}
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/CMakeLists.txt b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/CMakeLists.txt
new file mode 100644
index 0000000..9caa9fc
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/CMakeLists.txt
@@ -0,0 +1,76 @@
+cmake_minimum_required(VERSION 3.19)
+include(FetchContent)
+
+set(APP_NAME _native)
+option(BUILD_MACOS "Build for MacOS" OFF)
+
+project(${APP_NAME})
+
+set(CMAKE_CXX_STANDARD 17)
+
+# -U_FORTIFY_SOURCE to fix a bug in alpine and pybind11
+# https://github.com/pybind/pybind11/issues/1650
+# https://gitlab.alpinelinux.org/alpine/aports/-/issues/8626
+add_compile_options(-fPIC -fexceptions -fvisibility=hidden -fpermissive -pthread -Wall -Wno-unknown-pragmas -U_FORTIFY_SOURCE)
+
+if(BUILD_MACOS)
+ # https://pybind11.readthedocs.io/en/stable/compiling.html#building-manually
+ message(STATUS "Compile options for MacOS")
+ add_link_options(-ldl -undefined dynamic_lookup)
+else()
+ message(STATUS "Compile options for Linux/Win")
+endif(BUILD_MACOS)
+unset(BUILD_MACOS CACHE)
+
+if(CMAKE_BUILD_TYPE STREQUAL "Release")
+ message("Release mode: using abseil")
+ FetchContent_Declare(
+ absl
+ URL "https://github.com/abseil/abseil-cpp/archive/refs/tags/20230802.1.zip"
+ )
+ FetchContent_MakeAvailable(absl)
+else()
+ message("Debug mode: not using abseil")
+endif()
+
+include_directories(".")
+
+file(GLOB SOURCE_FILES "*.cpp"
+ "Aspects/*.cpp"
+ "Initializer/*.cpp"
+ "TaintedOps/*.cpp"
+ "TaintTracking/*.cpp"
+ "Utils/*.cpp")
+file(GLOB HEADER_FILES "*.h"
+ "Aspects/*.h"
+ "Initializer/*.h"
+ "TaintedOps/*.h"
+ "TaintTracking/*.h"
+ "Utils/*.h"
+ )
+
+# Debug messages
+message(STATUS "PYTHON_LIBRARIES = ${Python_LIBRARIES}")
+message(STATUS "PYTHON_EXECUTABLE = ${Python_EXECUTABLE}")
+message(STATUS "PYTHON_INCLUDE_DIRS = ${Python_INCLUDE_DIRS}")
+message(STATUS "Python_EXECUTABLE = ${Python_EXECUTABLE}")
+
+add_subdirectory(_vendor/pybind11)
+
+pybind11_add_module(_native SHARED ${SOURCE_FILES} ${HEADER_FILES})
+get_filename_component(PARENT_DIR ${CMAKE_CURRENT_LIST_DIR} DIRECTORY)
+set_target_properties(
+ _native
+ PROPERTIES
+ LIBRARY_OUTPUT_DIRECTORY "${CMAKE_CURRENT_LIST_DIR}"
+)
+
+if(CMAKE_BUILD_TYPE STREQUAL "Release")
+ target_link_libraries(${APP_NAME} PRIVATE absl::node_hash_map)
+endif()
+
+install(TARGETS _native
+  LIBRARY DESTINATION ${LIB_INSTALL_DIR}
+  ARCHIVE DESTINATION ${LIB_INSTALL_DIR}
+  RUNTIME DESTINATION ${LIB_INSTALL_DIR}
+)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/README.txt b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/README.txt
new file mode 100644
index 0000000..f7f67f2
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/README.txt
@@ -0,0 +1,37 @@
+# Compile the extension with CMake
+
+```bash
+sh clean.sh
+cmake -DPYTHON_EXECUTABLE:FILEPATH=/usr/bin/python3.11 . && \
+ make -j _native && \
+ mv lib_native.so _native.so
+```
+
+## Verify the compilation was correct
+
+```bash
+python3.11
+```
+```python
+from _native import Source, TaintRange
+source = Source(name="aaa", value="bbbb", origin="ccc")
+source = Source("aaa", "bbbb", "ccc")
+```
+
+## Clean Cmake folders
+
+```bash
+./clean.sh
+```
+
+
+## Debug with Valgrind
+
+```bash
+wget http://svn.python.org/projects/python/trunk/Misc/valgrind-python.supp
+
+valgrind --tool=memcheck --suppressions=ddtrace/appsec/_iast/_taint_tracking/valgrind-python.supp \
+  python ddtrace/appsec/_iast/_taint_tracking/bench_overload.py --log-file="valgrind_bench_overload.out"
+```
+
+## Debug with gdb
+
+```bash
+gdb --args python -m pytest tests/appsec/iast/test_command_injection.py
+```
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/__init__.py
new file mode 100644
index 0000000..9506d7a
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/__init__.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python3
+# flake8: noqa
+from typing import TYPE_CHECKING
+
+from .._metrics import _set_metric_iast_executed_source
+from .._utils import _is_python_version_supported
+
+
+if _is_python_version_supported():
+ from .. import oce
+ from ._native import ops
+ from ._native.aspect_helpers import _convert_escaped_text_to_tainted_text
+ from ._native.aspect_helpers import as_formatted_evidence
+ from ._native.aspect_helpers import common_replace
+ from ._native.aspect_format import _format_aspect
+ from ._native.aspect_helpers import parse_params
+ from ._native.initializer import active_map_addreses_size
+ from ._native.initializer import create_context
+ from ._native.initializer import debug_taint_map
+ from ._native.initializer import destroy_context
+ from ._native.initializer import initializer_size
+ from ._native.initializer import num_objects_tainted
+ from ._native.initializer import reset_context
+ from ._native.taint_tracking import OriginType
+ from ._native.taint_tracking import Source
+ from ._native.taint_tracking import TagMappingMode
+ from ._native.taint_tracking import are_all_text_all_ranges
+ from ._native.taint_tracking import get_range_by_hash
+ from ._native.taint_tracking import get_ranges
+ from ._native.taint_tracking import is_notinterned_notfasttainted_unicode
+ from ._native.taint_tracking import is_tainted
+ from ._native.taint_tracking import origin_to_str
+ from ._native.taint_tracking import set_fast_tainted_if_notinterned_unicode
+ from ._native.taint_tracking import set_ranges
+ from ._native.taint_tracking import copy_ranges_from_strings
+ from ._native.taint_tracking import copy_and_shift_ranges_from_strings
+ from ._native.taint_tracking import shift_taint_range
+ from ._native.taint_tracking import shift_taint_ranges
+ from ._native.taint_tracking import str_to_origin
+ from ._native.taint_tracking import taint_range as TaintRange
+
+ new_pyobject_id = ops.new_pyobject_id
+ set_ranges_from_values = ops.set_ranges_from_values
+ is_pyobject_tainted = is_tainted
+
+if TYPE_CHECKING:
+ from typing import Any
+ from typing import Dict
+ from typing import List
+ from typing import Tuple
+ from typing import Union
+
+
+__all__ = [
+ "_convert_escaped_text_to_tainted_text",
+ "new_pyobject_id",
+ "setup",
+ "Source",
+ "OriginType",
+ "TagMappingMode",
+ "TaintRange",
+ "get_ranges",
+ "set_ranges",
+ "copy_ranges_from_strings",
+ "copy_and_shift_ranges_from_strings",
+ "are_all_text_all_ranges",
+ "shift_taint_range",
+ "shift_taint_ranges",
+ "get_range_by_hash",
+ "is_notinterned_notfasttainted_unicode",
+ "set_fast_tainted_if_notinterned_unicode",
+ "aspect_helpers",
+ "reset_context",
+ "destroy_context",
+ "initializer_size",
+ "active_map_addreses_size",
+ "create_context",
+ "str_to_origin",
+ "origin_to_str",
+ "common_replace",
+ "_format_aspect",
+ "as_formatted_evidence",
+ "parse_params",
+ "num_objects_tainted",
+ "debug_taint_map",
+]
+
+
+def taint_pyobject(pyobject, source_name, source_value, source_origin=None):
+ # type: (Any, Any, Any, OriginType) -> Any
+
+    # Pyobject must be a non-empty text type (str, bytes or bytearray)
+ if not pyobject or not isinstance(pyobject, (str, bytes, bytearray)):
+ return pyobject
+
+ if isinstance(source_name, (bytes, bytearray)):
+ source_name = str(source_name, encoding="utf8", errors="ignore")
+ if isinstance(source_name, OriginType):
+ source_name = origin_to_str(source_name)
+
+ if isinstance(source_value, (bytes, bytearray)):
+ source_value = str(source_value, encoding="utf8", errors="ignore")
+ if source_origin is None:
+ source_origin = OriginType.PARAMETER
+
+ pyobject_newid = set_ranges_from_values(pyobject, len(pyobject), source_name, source_value, source_origin)
+ _set_metric_iast_executed_source(source_origin)
+ return pyobject_newid
+
+
+def taint_pyobject_with_ranges(pyobject, ranges): # type: (Any, tuple) -> None
+ set_ranges(pyobject, tuple(ranges))
+
+
+def get_tainted_ranges(pyobject): # type: (Any) -> tuple
+ return get_ranges(pyobject)
+
+
+def taint_ranges_as_evidence_info(pyobject):
+    # type: (Any) -> Tuple[List[Dict[str, Union[Any, int]]], List[Source]]
+ value_parts = []
+ sources = []
+ current_pos = 0
+ tainted_ranges = get_tainted_ranges(pyobject)
+ if not len(tainted_ranges):
+ return ([{"value": pyobject}], [])
+
+ for _range in tainted_ranges:
+ if _range.start > current_pos:
+ value_parts.append({"value": pyobject[current_pos : _range.start]})
+
+ if _range.source not in sources:
+ sources.append(_range.source)
+
+ value_parts.append(
+ {"value": pyobject[_range.start : _range.start + _range.length], "source": sources.index(_range.source)}
+ )
+ current_pos = _range.start + _range.length
+
+ if current_pos < len(pyobject):
+ value_parts.append({"value": pyobject[current_pos:]})
+
+ return value_parts, sources
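A worked example of taint_ranges_as_evidence_info's output shape, with a namedtuple standing in for the native TaintRange objects:

```python
from collections import namedtuple

Range = namedtuple("Range", ["start", "length", "source"])

text = "SELECT * FROM users WHERE name = 'bob'"
ranges = [Range(start=34, length=3, source="http.request.parameter")]

value_parts = []
current = 0
for r in ranges:
    if r.start > current:
        value_parts.append({"value": text[current:r.start]})  # untainted prefix
    value_parts.append({"value": text[r.start:r.start + r.length], "source": 0})
    current = r.start + r.length
if current < len(text):
    value_parts.append({"value": text[current:]})  # untainted suffix

# The tainted fragment is reported with an index into the sources list
assert value_parts[1] == {"value": "bob", "source": 0}
```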
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_native.cpython-311-x86_64-linux-gnu.so b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_native.cpython-311-x86_64-linux-gnu.so
new file mode 100755
index 0000000..ee286c9
Binary files /dev/null and b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_native.cpython-311-x86_64-linux-gnu.so differ
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/CMakeLists.txt b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/CMakeLists.txt
new file mode 100644
index 0000000..87ec103
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/CMakeLists.txt
@@ -0,0 +1,322 @@
+# CMakeLists.txt -- Build system for the pybind11 modules
+#
+# Copyright (c) 2015 Wenzel Jakob
+#
+# All rights reserved. Use of this source code is governed by a
+# BSD-style license that can be found in the LICENSE file.
+
+cmake_minimum_required(VERSION 3.5)
+
+# The `cmake_minimum_required(VERSION 3.5...3.26)` syntax does not work with
+# some versions of VS that have a patched CMake 3.11. This forces us to emulate
+# the behavior using the following workaround:
+if(${CMAKE_VERSION} VERSION_LESS 3.26)
+ cmake_policy(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION})
+else()
+ cmake_policy(VERSION 3.26)
+endif()
+
+# Avoid infinite recursion if tests include this as a subdirectory
+if(DEFINED PYBIND11_MASTER_PROJECT)
+ return()
+endif()
+
+# Extract project version from source
+file(STRINGS "${CMAKE_CURRENT_SOURCE_DIR}/include/pybind11/detail/common.h"
+ pybind11_version_defines REGEX "#define PYBIND11_VERSION_(MAJOR|MINOR|PATCH) ")
+
+foreach(ver ${pybind11_version_defines})
+ if(ver MATCHES [[#define PYBIND11_VERSION_(MAJOR|MINOR|PATCH) +([^ ]+)$]])
+ set(PYBIND11_VERSION_${CMAKE_MATCH_1} "${CMAKE_MATCH_2}")
+ endif()
+endforeach()
+
+if(PYBIND11_VERSION_PATCH MATCHES [[\.([a-zA-Z0-9]+)$]])
+ set(pybind11_VERSION_TYPE "${CMAKE_MATCH_1}")
+endif()
+string(REGEX MATCH "^[0-9]+" PYBIND11_VERSION_PATCH "${PYBIND11_VERSION_PATCH}")
+
+project(
+ pybind11
+ LANGUAGES CXX
+ VERSION "${PYBIND11_VERSION_MAJOR}.${PYBIND11_VERSION_MINOR}.${PYBIND11_VERSION_PATCH}")
+
+# Standard includes
+include(GNUInstallDirs)
+include(CMakePackageConfigHelpers)
+include(CMakeDependentOption)
+
+if(NOT pybind11_FIND_QUIETLY)
+ message(STATUS "pybind11 v${pybind11_VERSION} ${pybind11_VERSION_TYPE}")
+endif()
+
+# Check if pybind11 is being used directly or via add_subdirectory
+if(CMAKE_SOURCE_DIR STREQUAL PROJECT_SOURCE_DIR)
+ ### Warn if not an out-of-source builds
+ if(CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR)
+ set(lines
+ "You are building in-place. If that is not what you intended to "
+ "do, you can clean the source directory with:\n"
+ "rm -r CMakeCache.txt CMakeFiles/ cmake_uninstall.cmake pybind11Config.cmake "
+ "pybind11ConfigVersion.cmake tests/CMakeFiles/\n")
+ message(AUTHOR_WARNING ${lines})
+ endif()
+
+ set(PYBIND11_MASTER_PROJECT ON)
+
+ if(OSX AND CMAKE_VERSION VERSION_LESS 3.7)
+ # Bug in macOS CMake < 3.7 is unable to download catch
+ message(WARNING "CMAKE 3.7+ needed on macOS to download catch, and newer HIGHLY recommended")
+ elseif(WINDOWS AND CMAKE_VERSION VERSION_LESS 3.8)
+ # Only tested with 3.8+ in CI.
+ message(WARNING "CMAKE 3.8+ tested on Windows, previous versions untested")
+ endif()
+
+ message(STATUS "CMake ${CMAKE_VERSION}")
+
+ if(CMAKE_CXX_STANDARD)
+ set(CMAKE_CXX_EXTENSIONS OFF)
+ set(CMAKE_CXX_STANDARD_REQUIRED ON)
+ endif()
+
+ set(pybind11_system "")
+
+ set_property(GLOBAL PROPERTY USE_FOLDERS ON)
+else()
+ set(PYBIND11_MASTER_PROJECT OFF)
+ set(pybind11_system SYSTEM)
+endif()
+
+# Options
+option(PYBIND11_INSTALL "Install pybind11 header files?" ${PYBIND11_MASTER_PROJECT})
+option(PYBIND11_TEST "Build pybind11 test suite?" ${PYBIND11_MASTER_PROJECT})
+option(PYBIND11_NOPYTHON "Disable search for Python" OFF)
+option(PYBIND11_SIMPLE_GIL_MANAGEMENT
+ "Use simpler GIL management logic that does not support disassociation" OFF)
+set(PYBIND11_INTERNALS_VERSION
+ ""
+ CACHE STRING "Override the ABI version, may be used to enable the unstable ABI.")
+
+if(PYBIND11_SIMPLE_GIL_MANAGEMENT)
+ add_compile_definitions(PYBIND11_SIMPLE_GIL_MANAGEMENT)
+endif()
+
+cmake_dependent_option(
+ USE_PYTHON_INCLUDE_DIR
+ "Install pybind11 headers in Python include directory instead of default installation prefix"
+ OFF "PYBIND11_INSTALL" OFF)
+
+cmake_dependent_option(PYBIND11_FINDPYTHON "Force new FindPython" OFF
+ "NOT CMAKE_VERSION VERSION_LESS 3.12" OFF)
+
+# NB: when adding a header don't forget to also add it to setup.py
+set(PYBIND11_HEADERS
+ include/pybind11/detail/class.h
+ include/pybind11/detail/common.h
+ include/pybind11/detail/descr.h
+ include/pybind11/detail/init.h
+ include/pybind11/detail/internals.h
+ include/pybind11/detail/type_caster_base.h
+ include/pybind11/detail/typeid.h
+ include/pybind11/attr.h
+ include/pybind11/buffer_info.h
+ include/pybind11/cast.h
+ include/pybind11/chrono.h
+ include/pybind11/common.h
+ include/pybind11/complex.h
+ include/pybind11/options.h
+ include/pybind11/eigen.h
+ include/pybind11/eigen/common.h
+ include/pybind11/eigen/matrix.h
+ include/pybind11/eigen/tensor.h
+ include/pybind11/embed.h
+ include/pybind11/eval.h
+ include/pybind11/gil.h
+ include/pybind11/iostream.h
+ include/pybind11/functional.h
+ include/pybind11/numpy.h
+ include/pybind11/operators.h
+ include/pybind11/pybind11.h
+ include/pybind11/pytypes.h
+ include/pybind11/stl.h
+ include/pybind11/stl_bind.h
+ include/pybind11/stl/filesystem.h
+ include/pybind11/type_caster_pyobject_ptr.h)
+
+# Compare with grep and warn if mismatched
+if(PYBIND11_MASTER_PROJECT AND NOT CMAKE_VERSION VERSION_LESS 3.12)
+ file(
+ GLOB_RECURSE _pybind11_header_check
+ LIST_DIRECTORIES false
+ RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}"
+ CONFIGURE_DEPENDS "include/pybind11/*.h")
+ set(_pybind11_here_only ${PYBIND11_HEADERS})
+ set(_pybind11_disk_only ${_pybind11_header_check})
+ list(REMOVE_ITEM _pybind11_here_only ${_pybind11_header_check})
+ list(REMOVE_ITEM _pybind11_disk_only ${PYBIND11_HEADERS})
+ if(_pybind11_here_only)
+ message(AUTHOR_WARNING "PYBIND11_HEADERS has extra files:" ${_pybind11_here_only})
+ endif()
+ if(_pybind11_disk_only)
+ message(AUTHOR_WARNING "PYBIND11_HEADERS is missing files:" ${_pybind11_disk_only})
+ endif()
+endif()
+
+# CMake 3.12 added list(TRANSFORM PREPEND
+# But we can't use it yet
+string(REPLACE "include/" "${CMAKE_CURRENT_SOURCE_DIR}/include/" PYBIND11_HEADERS
+ "${PYBIND11_HEADERS}")
+
+# Cache variable so this can be used in parent projects
+set(pybind11_INCLUDE_DIR
+ "${CMAKE_CURRENT_LIST_DIR}/include"
+ CACHE INTERNAL "Directory where pybind11 headers are located")
+
+# Backward compatible variable for add_subdirectory mode
+if(NOT PYBIND11_MASTER_PROJECT)
+ set(PYBIND11_INCLUDE_DIR
+ "${pybind11_INCLUDE_DIR}"
+ CACHE INTERNAL "")
+endif()
+
+# Note: when creating targets, you cannot use if statements at configure time -
+# you need generator expressions, because those will be placed in the target file.
+# You can also place ifs *in* the Config.in, but not here.
+
+# This section builds targets, but does *not* touch Python
+# Non-IMPORT targets cannot be defined twice
+if(NOT TARGET pybind11_headers)
+ # Build the headers-only target (no Python included):
+ # (long name used here to keep this from clashing in subdirectory mode)
+ add_library(pybind11_headers INTERFACE)
+ add_library(pybind11::pybind11_headers ALIAS pybind11_headers) # to match exported target
+ add_library(pybind11::headers ALIAS pybind11_headers) # easier to use/remember
+
+ target_include_directories(
+ pybind11_headers ${pybind11_system} INTERFACE $
+ $)
+
+ target_compile_features(pybind11_headers INTERFACE cxx_inheriting_constructors cxx_user_literals
+ cxx_right_angle_brackets)
+ if(NOT "${PYBIND11_INTERNALS_VERSION}" STREQUAL "")
+ target_compile_definitions(
+ pybind11_headers INTERFACE "PYBIND11_INTERNALS_VERSION=${PYBIND11_INTERNALS_VERSION}")
+ endif()
+else()
+ # It is invalid to install a target twice, too.
+ set(PYBIND11_INSTALL OFF)
+endif()
+
+include("${CMAKE_CURRENT_SOURCE_DIR}/tools/pybind11Common.cmake")
+# https://github.com/jtojnar/cmake-snips/#concatenating-paths-when-building-pkg-config-files
+# TODO: cmake 3.20 adds the cmake_path() function, which obsoletes this snippet
+include("${CMAKE_CURRENT_SOURCE_DIR}/tools/JoinPaths.cmake")
+
+# Relative directory setting
+if(USE_PYTHON_INCLUDE_DIR AND DEFINED Python_INCLUDE_DIRS)
+ file(RELATIVE_PATH CMAKE_INSTALL_INCLUDEDIR ${CMAKE_INSTALL_PREFIX} ${Python_INCLUDE_DIRS})
+elseif(USE_PYTHON_INCLUDE_DIR AND DEFINED PYTHON_INCLUDE_DIR)
+ file(RELATIVE_PATH CMAKE_INSTALL_INCLUDEDIR ${CMAKE_INSTALL_PREFIX} ${PYTHON_INCLUDE_DIRS})
+endif()
+
+if(PYBIND11_INSTALL)
+ install(DIRECTORY ${pybind11_INCLUDE_DIR}/pybind11 DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
+ set(PYBIND11_CMAKECONFIG_INSTALL_DIR
+ "${CMAKE_INSTALL_DATAROOTDIR}/cmake/${PROJECT_NAME}"
+ CACHE STRING "install path for pybind11Config.cmake")
+
+ if(IS_ABSOLUTE "${CMAKE_INSTALL_INCLUDEDIR}")
+ set(pybind11_INCLUDEDIR "${CMAKE_INSTALL_FULL_INCLUDEDIR}")
+ else()
+ set(pybind11_INCLUDEDIR "\$\{PACKAGE_PREFIX_DIR\}/${CMAKE_INSTALL_INCLUDEDIR}")
+ endif()
+
+ configure_package_config_file(
+ tools/${PROJECT_NAME}Config.cmake.in "${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Config.cmake"
+ INSTALL_DESTINATION ${PYBIND11_CMAKECONFIG_INSTALL_DIR})
+
+ if(CMAKE_VERSION VERSION_LESS 3.14)
+ # Remove CMAKE_SIZEOF_VOID_P from ConfigVersion.cmake since the library does
+ # not depend on architecture specific settings or libraries.
+ set(_PYBIND11_CMAKE_SIZEOF_VOID_P ${CMAKE_SIZEOF_VOID_P})
+ unset(CMAKE_SIZEOF_VOID_P)
+
+ write_basic_package_version_file(
+ ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake
+ VERSION ${PROJECT_VERSION}
+ COMPATIBILITY AnyNewerVersion)
+
+ set(CMAKE_SIZEOF_VOID_P ${_PYBIND11_CMAKE_SIZEOF_VOID_P})
+ else()
+ # CMake 3.14+ natively supports header-only libraries
+ write_basic_package_version_file(
+ ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake
+ VERSION ${PROJECT_VERSION}
+ COMPATIBILITY AnyNewerVersion ARCH_INDEPENDENT)
+ endif()
+
+ install(
+ FILES ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Config.cmake
+ ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake
+ tools/FindPythonLibsNew.cmake
+ tools/pybind11Common.cmake
+ tools/pybind11Tools.cmake
+ tools/pybind11NewTools.cmake
+ DESTINATION ${PYBIND11_CMAKECONFIG_INSTALL_DIR})
+
+ if(NOT PYBIND11_EXPORT_NAME)
+ set(PYBIND11_EXPORT_NAME "${PROJECT_NAME}Targets")
+ endif()
+
+ install(TARGETS pybind11_headers EXPORT "${PYBIND11_EXPORT_NAME}")
+
+ install(
+ EXPORT "${PYBIND11_EXPORT_NAME}"
+ NAMESPACE "pybind11::"
+ DESTINATION ${PYBIND11_CMAKECONFIG_INSTALL_DIR})
+
+ # pkg-config support
+ if(NOT prefix_for_pc_file)
+ set(prefix_for_pc_file "${CMAKE_INSTALL_PREFIX}")
+ endif()
+ join_paths(includedir_for_pc_file "\${prefix}" "${CMAKE_INSTALL_INCLUDEDIR}")
+ configure_file("${CMAKE_CURRENT_SOURCE_DIR}/tools/pybind11.pc.in"
+ "${CMAKE_CURRENT_BINARY_DIR}/pybind11.pc" @ONLY)
+ install(FILES "${CMAKE_CURRENT_BINARY_DIR}/pybind11.pc"
+ DESTINATION "${CMAKE_INSTALL_DATAROOTDIR}/pkgconfig/")
+
+ # Uninstall target
+ if(PYBIND11_MASTER_PROJECT)
+ configure_file("${CMAKE_CURRENT_SOURCE_DIR}/tools/cmake_uninstall.cmake.in"
+ "${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake" IMMEDIATE @ONLY)
+
+ add_custom_target(uninstall COMMAND ${CMAKE_COMMAND} -P
+ ${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake)
+ endif()
+endif()
+
+# BUILD_TESTING takes priority, but only if this is the master project
+if(PYBIND11_MASTER_PROJECT AND DEFINED BUILD_TESTING)
+ if(BUILD_TESTING)
+ if(_pybind11_nopython)
+ message(FATAL_ERROR "Cannot activate tests in NOPYTHON mode")
+ else()
+ add_subdirectory(tests)
+ endif()
+ endif()
+else()
+ if(PYBIND11_TEST)
+ if(_pybind11_nopython)
+ message(FATAL_ERROR "Cannot activate tests in NOPYTHON mode")
+ else()
+ add_subdirectory(tests)
+ endif()
+ endif()
+endif()
+
+# Better symmetry with find_package(pybind11 CONFIG) mode.
+if(NOT PYBIND11_MASTER_PROJECT)
+ set(pybind11_FOUND
+ TRUE
+ CACHE INTERNAL "True if pybind11 and all required components found on the system")
+endif()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__init__.py
new file mode 100644
index 0000000..7c10b30
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__init__.py
@@ -0,0 +1,17 @@
+import sys
+
+if sys.version_info < (3, 6): # noqa: UP036
+ msg = "pybind11 does not support Python < 3.6. 2.9 was the last release supporting Python 2.7 and 3.5."
+ raise ImportError(msg)
+
+
+from ._version import __version__, version_info
+from .commands import get_cmake_dir, get_include, get_pkgconfig_dir
+
+__all__ = (
+ "version_info",
+ "__version__",
+ "get_include",
+ "get_cmake_dir",
+ "get_pkgconfig_dir",
+)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__main__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__main__.py
new file mode 100644
index 0000000..180665c
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/__main__.py
@@ -0,0 +1,62 @@
+# pylint: disable=missing-function-docstring
+
+import argparse
+import sys
+import sysconfig
+
+from ._version import __version__
+from .commands import get_cmake_dir, get_include, get_pkgconfig_dir
+
+
+def print_includes() -> None:
+ dirs = [
+ sysconfig.get_path("include"),
+ sysconfig.get_path("platinclude"),
+ get_include(),
+ ]
+
+ # Make unique but preserve order
+ unique_dirs = []
+ for d in dirs:
+ if d and d not in unique_dirs:
+ unique_dirs.append(d)
+
+ print(" ".join("-I" + d for d in unique_dirs))
+
+
+def main() -> None:
+ parser = argparse.ArgumentParser()
+ parser.add_argument(
+ "--version",
+ action="version",
+ version=__version__,
+ help="Print the version and exit.",
+ )
+ parser.add_argument(
+ "--includes",
+ action="store_true",
+ help="Include flags for both pybind11 and Python headers.",
+ )
+ parser.add_argument(
+ "--cmakedir",
+ action="store_true",
+ help="Print the CMake module directory, ideal for setting -Dpybind11_ROOT in CMake.",
+ )
+ parser.add_argument(
+ "--pkgconfigdir",
+ action="store_true",
+ help="Print the pkgconfig directory, ideal for setting $PKG_CONFIG_PATH.",
+ )
+ args = parser.parse_args()
+ if not sys.argv[1:]:
+ parser.print_help()
+ if args.includes:
+ print_includes()
+ if args.cmakedir:
+ print(get_cmake_dir())
+ if args.pkgconfigdir:
+ print(get_pkgconfig_dir())
+
+
+if __name__ == "__main__":
+ main()
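
The `__main__` module above makes the package runnable as a CLI. A minimal sketch of driving it to assemble a compile command, assuming a source file `example.cpp` (illustrative name):

```python
# `python -m pybind11 --includes` prints -I flags for both the Python and
# pybind11 headers; splice them into a compiler invocation.
import subprocess
import sys

includes = subprocess.check_output(
    [sys.executable, "-m", "pybind11", "--includes"], text=True
).split()

cmd = ["c++", "-O3", "-shared", "-std=c++11", "-fPIC", *includes, "example.cpp"]
print(" ".join(cmd))  # printed for inspection rather than executed
```
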
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/_version.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/_version.py
new file mode 100644
index 0000000..9280fa0
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/_version.py
@@ -0,0 +1,12 @@
+from typing import Union
+
+
+def _to_int(s: str) -> Union[int, str]:
+ try:
+ return int(s)
+ except ValueError:
+ return s
+
+
+__version__ = "2.11.1"
+version_info = tuple(_to_int(s) for s in __version__.split("."))
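
`_to_int` keeps numeric components as `int` so ordinary tuple comparisons on `version_info` order correctly, while non-numeric pre-release tags survive as strings. A quick illustration (the `dev1` suffix is hypothetical):

```python
def _to_int(s):
    try:
        return int(s)
    except ValueError:
        return s  # non-numeric tags (e.g. "dev1") stay as strings


version_info = tuple(_to_int(s) for s in "2.11.1".split("."))
assert version_info == (2, 11, 1)
assert version_info >= (2, 6)  # numeric components compare as ints

# A hypothetical pre-release keeps its suffix as a trailing string component:
assert tuple(_to_int(s) for s in "2.12.0.dev1".split(".")) == (2, 12, 0, "dev1")
```
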
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/commands.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/commands.py
new file mode 100644
index 0000000..b11690f
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/commands.py
@@ -0,0 +1,37 @@
+import os
+
+DIR = os.path.abspath(os.path.dirname(__file__))
+
+
+def get_include(user: bool = False) -> str: # noqa: ARG001
+ """
+ Return the path to the pybind11 include directory. The historical "user"
+ argument is unused, and may be removed.
+ """
+ installed_path = os.path.join(DIR, "include")
+ source_path = os.path.join(os.path.dirname(DIR), "include")
+ return installed_path if os.path.exists(installed_path) else source_path
+
+
+def get_cmake_dir() -> str:
+ """
+ Return the path to the pybind11 CMake module directory.
+ """
+ cmake_installed_path = os.path.join(DIR, "share", "cmake", "pybind11")
+ if os.path.exists(cmake_installed_path):
+ return cmake_installed_path
+
+ msg = "pybind11 not installed, installation required to access the CMake files"
+ raise ImportError(msg)
+
+
+def get_pkgconfig_dir() -> str:
+ """
+ Return the path to the pybind11 pkgconfig directory.
+ """
+ pkgconfig_installed_path = os.path.join(DIR, "share", "pkgconfig")
+ if os.path.exists(pkgconfig_installed_path):
+ return pkgconfig_installed_path
+
+ msg = "pybind11 not installed, installation required to access the pkgconfig files"
+ raise ImportError(msg)
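
`get_cmake_dir` raises unless the package is actually installed (the `share/cmake/pybind11` tree exists). A minimal sketch of feeding it to a configure step, assuming a project that calls `find_package(pybind11 CONFIG)`, `cmake` on `PATH`, and an illustrative build directory name:

```python
import subprocess

import pybind11

# Point CMake's package search at the directory get_cmake_dir() resolves to.
subprocess.check_call(
    ["cmake", "-S", ".", "-B", "build", f"-Dpybind11_DIR={pybind11.get_cmake_dir()}"]
)
```
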
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/py.typed b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/setup_helpers.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/setup_helpers.py
new file mode 100644
index 0000000..aeeee9d
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/pybind11/setup_helpers.py
@@ -0,0 +1,498 @@
+"""
+This module provides helpers for C++11+ projects using pybind11.
+
+LICENSE:
+
+Copyright (c) 2016 Wenzel Jakob <wenzel.jakob@epfl.ch>, All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its contributors
+ may be used to endorse or promote products derived from this software
+ without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""
+
+# IMPORTANT: If you change this file in the pybind11 repo, also review
+# setup_helpers.pyi for matching changes.
+#
+# If you copy this file in, you don't
+# need the .pyi file; it's just an interface file for static type checkers.
+
+import contextlib
+import os
+import platform
+import shlex
+import shutil
+import sys
+import sysconfig
+import tempfile
+import threading
+import warnings
+from functools import lru_cache
+from pathlib import Path
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Iterable,
+ Iterator,
+ List,
+ Optional,
+ Tuple,
+ TypeVar,
+ Union,
+)
+
+try:
+ from setuptools import Extension as _Extension
+ from setuptools.command.build_ext import build_ext as _build_ext
+except ImportError:
+ from distutils.command.build_ext import build_ext as _build_ext # type: ignore[assignment]
+ from distutils.extension import Extension as _Extension # type: ignore[assignment]
+
+import distutils.ccompiler
+import distutils.errors
+
+WIN = sys.platform.startswith("win32") and "mingw" not in sysconfig.get_platform()
+MACOS = sys.platform.startswith("darwin")
+STD_TMPL = "/std:c++{}" if WIN else "-std=c++{}"
+
+
+# It is recommended to use PEP 518 builds if using this module. However, this
+# file explicitly supports being copied into a user's project directory
+# standalone, and pulling pybind11 with the deprecated setup_requires feature.
+# If you copy the file, remember to add it to your MANIFEST.in, and add the current
+# directory into your path if it sits beside your setup.py.
+
+
+class Pybind11Extension(_Extension):
+ """
+ Build a C++11+ Extension module with pybind11. This automatically adds the
+ recommended flags when you init the extension and assumes C++ sources - you
+ can further modify the options yourself.
+
+ The customizations are:
+
+ * ``/EHsc`` and ``/bigobj`` on Windows
+ * ``stdlib=libc++`` on macOS
+ * ``visibility=hidden`` and ``-g0`` on Unix
+
+ Finally, you can set ``cxx_std`` via constructor or afterwards to enable
+ flags for C++ std, and a few extra helper flags related to the C++ standard
+ level. It is _highly_ recommended you either set this, or use the provided
+ ``build_ext``, which will search for the highest supported extension for
+ you if the ``cxx_std`` property is not set. Do not set the ``cxx_std``
+ property more than once, as flags are added when you set it. Set the
+ property to None to disable the addition of C++ standard flags.
+
+ If you want to add pybind11 headers manually, for example for an exact
+ git checkout, then set ``include_pybind11=False``.
+ """
+
+ # flags are prepended, so that they can be further overridden, e.g. by
+ # ``extra_compile_args=["-g"]``.
+
+ def _add_cflags(self, flags: List[str]) -> None:
+ self.extra_compile_args[:0] = flags
+
+ def _add_ldflags(self, flags: List[str]) -> None:
+ self.extra_link_args[:0] = flags
+
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ self._cxx_level = 0
+ cxx_std = kwargs.pop("cxx_std", 0)
+
+ if "language" not in kwargs:
+ kwargs["language"] = "c++"
+
+ include_pybind11 = kwargs.pop("include_pybind11", True)
+
+ super().__init__(*args, **kwargs)
+
+ # Include the installed package pybind11 headers
+ if include_pybind11:
+ # If using setup_requires, this fails the first time - that's okay
+ try:
+ import pybind11
+
+ pyinc = pybind11.get_include()
+
+ if pyinc not in self.include_dirs:
+ self.include_dirs.append(pyinc)
+ except ModuleNotFoundError:
+ pass
+
+ self.cxx_std = cxx_std
+
+ cflags = []
+ if WIN:
+ cflags += ["/EHsc", "/bigobj"]
+ else:
+ cflags += ["-fvisibility=hidden"]
+ env_cflags = os.environ.get("CFLAGS", "")
+ env_cppflags = os.environ.get("CPPFLAGS", "")
+ c_cpp_flags = shlex.split(env_cflags) + shlex.split(env_cppflags)
+ if not any(opt.startswith("-g") for opt in c_cpp_flags):
+ cflags += ["-g0"]
+ self._add_cflags(cflags)
+
+ @property
+ def cxx_std(self) -> int:
+ """
+ The CXX standard level. If set, will add the required flags. If left at
+ 0, it will trigger an automatic search when pybind11's build_ext is
+ used. If None, will have no effect. Besides just the flags, this may
+ add a macos-min 10.9 or 10.14 flag if MACOSX_DEPLOYMENT_TARGET is
+ unset.
+ """
+ return self._cxx_level
+
+ @cxx_std.setter
+ def cxx_std(self, level: int) -> None:
+ if self._cxx_level:
+ warnings.warn(
+ "You cannot safely change the cxx_level after setting it!", stacklevel=2
+ )
+
+ # MSVC 2015 Update 3 and later only have 14 (and later 17) modes, so
+ # force a valid flag here.
+ if WIN and level == 11:
+ level = 14
+
+ self._cxx_level = level
+
+ if not level:
+ return
+
+ cflags = [STD_TMPL.format(level)]
+ ldflags = []
+
+ if MACOS and "MACOSX_DEPLOYMENT_TARGET" not in os.environ:
+ # C++17 requires a higher min version of macOS. An earlier version
+ # (10.12 or 10.13) can be set manually via environment variable if
+ # you are careful in your feature usage, but 10.14 is the safest
+ # setting for general use. However, never set higher than the
+ # current macOS version!
+ current_macos = tuple(int(x) for x in platform.mac_ver()[0].split(".")[:2])
+ desired_macos = (10, 9) if level < 17 else (10, 14)
+ macos_string = ".".join(str(x) for x in min(current_macos, desired_macos))
+ macosx_min = f"-mmacosx-version-min={macos_string}"
+ cflags += [macosx_min]
+ ldflags += [macosx_min]
+
+ self._add_cflags(cflags)
+ self._add_ldflags(ldflags)
+
+
+# Just in case someone clever tries to multithread
+tmp_chdir_lock = threading.Lock()
+
+
+@contextlib.contextmanager
+def tmp_chdir() -> Iterator[str]:
+ "Prepare and enter a temporary directory, cleanup when done"
+
+ # Threadsafe
+ with tmp_chdir_lock:
+ olddir = os.getcwd()
+ try:
+ tmpdir = tempfile.mkdtemp()
+ os.chdir(tmpdir)
+ yield tmpdir
+ finally:
+ os.chdir(olddir)
+ shutil.rmtree(tmpdir)
+
+
+# cf http://bugs.python.org/issue26689
+def has_flag(compiler: Any, flag: str) -> bool:
+ """
+ Return the flag if a flag name is supported on the
+ specified compiler, otherwise None (can be used as a boolean).
+ If multiple flags are passed, return the first that matches.
+ """
+
+ with tmp_chdir():
+ fname = Path("flagcheck.cpp")
+ # Don't trigger -Wunused-parameter.
+ fname.write_text("int main (int, char **) { return 0; }", encoding="utf-8")
+
+ try:
+ compiler.compile([str(fname)], extra_postargs=[flag])
+ except distutils.errors.CompileError:
+ return False
+ return True
+
+
+# Every call caches its result per compiler (handled by functools.lru_cache below).
+
+
+@lru_cache()
+def auto_cpp_level(compiler: Any) -> Union[str, int]:
+ """
+ Return the max supported C++ std level (17, 14, or 11). Returns latest on Windows.
+ """
+
+ if WIN:
+ return "latest"
+
+ levels = [17, 14, 11]
+
+ for level in levels:
+ if has_flag(compiler, STD_TMPL.format(level)):
+ return level
+
+ msg = "Unsupported compiler -- at least C++11 support is needed!"
+ raise RuntimeError(msg)
+
+
+class build_ext(_build_ext): # noqa: N801
+ """
+ Customized build_ext that allows an auto-search for the highest supported
+ C++ level for Pybind11Extension. This is only needed for the auto-search
+ for now, and is completely optional otherwise.
+ """
+
+ def build_extensions(self) -> None:
+ """
+ Build extensions, injecting C++ std for Pybind11Extension if needed.
+ """
+
+ for ext in self.extensions:
+ if hasattr(ext, "_cxx_level") and ext._cxx_level == 0:
+ ext.cxx_std = auto_cpp_level(self.compiler)
+
+ super().build_extensions()
+
+
+def intree_extensions(
+ paths: Iterable[str], package_dir: Optional[Dict[str, str]] = None
+) -> List[Pybind11Extension]:
+ """
+ Generate Pybind11Extensions from source files directly located in a Python
+ source tree.
+
+ ``package_dir`` behaves as in ``setuptools.setup``. If unset, the Python
+ package root parent is determined as the first parent directory that does
+ not contain an ``__init__.py`` file.
+ """
+ exts = []
+
+ if package_dir is None:
+ for path in paths:
+ parent, _ = os.path.split(path)
+ while os.path.exists(os.path.join(parent, "__init__.py")):
+ parent, _ = os.path.split(parent)
+ relname, _ = os.path.splitext(os.path.relpath(path, parent))
+ qualified_name = relname.replace(os.path.sep, ".")
+ exts.append(Pybind11Extension(qualified_name, [path]))
+ return exts
+
+ for path in paths:
+ for prefix, parent in package_dir.items():
+ if path.startswith(parent):
+ relname, _ = os.path.splitext(os.path.relpath(path, parent))
+ qualified_name = relname.replace(os.path.sep, ".")
+ if prefix:
+ qualified_name = prefix + "." + qualified_name
+ exts.append(Pybind11Extension(qualified_name, [path]))
+ break
+ else:
+ msg = (
+ f"path {path} is not a child of any of the directories listed "
+ f"in 'package_dir' ({package_dir})"
+ )
+ raise ValueError(msg)
+
+ return exts
+
+
+def naive_recompile(obj: str, src: str) -> bool:
+ """
+ This will recompile only if the source file changes. It does not check
+ header files, so a more advanced function or Ccache is better if you have
+ editable header files in your package.
+ """
+ return os.stat(obj).st_mtime < os.stat(src).st_mtime
+
+
+def no_recompile(obj: str, src: str) -> bool:  # noqa: ARG001
+ """
+ This is the safest but slowest choice (and is the default) - will always
+ recompile sources.
+ """
+ return True
+
+
+S = TypeVar("S", bound="ParallelCompile")
+
+CCompilerMethod = Callable[
+ [
+ distutils.ccompiler.CCompiler,
+ List[str],
+ Optional[str],
+ Optional[Union[Tuple[str], Tuple[str, Optional[str]]]],
+ Optional[List[str]],
+ bool,
+ Optional[List[str]],
+ Optional[List[str]],
+ Optional[List[str]],
+ ],
+ List[str],
+]
+
+
+# Optional parallel compile utility
+# inspired by: http://stackoverflow.com/questions/11013851/speeding-up-build-process-with-distutils
+# and: https://github.com/tbenthompson/cppimport/blob/stable/cppimport/build_module.py
+# and NumPy's parallel distutils module:
+# https://github.com/numpy/numpy/blob/master/numpy/distutils/ccompiler.py
+class ParallelCompile:
+ """
+ Make a parallel compile function. Inspired by
+ numpy.distutils.ccompiler.CCompiler.compile and cppimport.
+
+ This takes several arguments that allow you to customize the compile
+ function created:
+
+ envvar:
+ Set an environment variable to control the compilation threads, like
+ NPY_NUM_BUILD_JOBS
+ default:
+ 0 will automatically multithread, or 1 will only multithread if the
+ envvar is set.
+ max:
+ The limit for automatic multithreading if non-zero
+ needs_recompile:
+ A function of (obj, src) that returns True when recompile is needed. No
+ effect in isolated mode; use ccache instead, see
+ https://github.com/matplotlib/matplotlib/issues/1507/
+
+ To use::
+
+ ParallelCompile("NPY_NUM_BUILD_JOBS").install()
+
+ or::
+
+ with ParallelCompile("NPY_NUM_BUILD_JOBS"):
+ setup(...)
+
+ By default, this assumes all files need to be recompiled. A smarter
+ function can be provided via needs_recompile. If the output has not yet
+ been generated, the compile will always run, and this function is not
+ called.
+ """
+
+ __slots__ = ("envvar", "default", "max", "_old", "needs_recompile")
+
+ def __init__(
+ self,
+ envvar: Optional[str] = None,
+ default: int = 0,
+ max: int = 0, # pylint: disable=redefined-builtin
+ needs_recompile: Callable[[str, str], bool] = no_recompile,
+ ) -> None:
+ self.envvar = envvar
+ self.default = default
+ self.max = max
+ self.needs_recompile = needs_recompile
+ self._old: List[CCompilerMethod] = []
+
+ def function(self) -> CCompilerMethod:
+ """
+ Builds a function object usable as distutils.ccompiler.CCompiler.compile.
+ """
+
+ def compile_function(
+ compiler: distutils.ccompiler.CCompiler,
+ sources: List[str],
+ output_dir: Optional[str] = None,
+ macros: Optional[Union[Tuple[str], Tuple[str, Optional[str]]]] = None,
+ include_dirs: Optional[List[str]] = None,
+ debug: bool = False,
+ extra_preargs: Optional[List[str]] = None,
+ extra_postargs: Optional[List[str]] = None,
+ depends: Optional[List[str]] = None,
+ ) -> Any:
+ # These lines are directly from distutils.ccompiler.CCompiler
+ macros, objects, extra_postargs, pp_opts, build = compiler._setup_compile( # type: ignore[attr-defined]
+ output_dir, macros, include_dirs, sources, depends, extra_postargs
+ )
+ cc_args = compiler._get_cc_args(pp_opts, debug, extra_preargs) # type: ignore[attr-defined]
+
+ # The number of threads; start with default.
+ threads = self.default
+
+            # The environment variable, if one was configured, overrides the default.
+ if self.envvar is not None:
+ threads = int(os.environ.get(self.envvar, self.default))
+
+ def _single_compile(obj: Any) -> None:
+ try:
+ src, ext = build[obj]
+ except KeyError:
+ return
+
+ if not os.path.exists(obj) or self.needs_recompile(obj, src):
+ compiler._compile(obj, src, ext, cc_args, extra_postargs, pp_opts) # type: ignore[attr-defined]
+
+ try:
+ # Importing .synchronize checks for platforms that have some multiprocessing
+ # capabilities but lack semaphores, such as AWS Lambda and Android Termux.
+ import multiprocessing.synchronize
+ from multiprocessing.pool import ThreadPool
+ except ImportError:
+ threads = 1
+
+ if threads == 0:
+ try:
+ threads = multiprocessing.cpu_count()
+ threads = self.max if self.max and self.max < threads else threads
+ except NotImplementedError:
+ threads = 1
+
+ if threads > 1:
+ with ThreadPool(threads) as pool:
+ for _ in pool.imap_unordered(_single_compile, objects):
+ pass
+ else:
+ for ob in objects:
+ _single_compile(ob)
+
+ return objects
+
+ return compile_function
+
+ def install(self: S) -> S:
+ """
+ Installs the compile function into distutils.ccompiler.CCompiler.compile.
+ """
+ distutils.ccompiler.CCompiler.compile = self.function() # type: ignore[assignment]
+ return self
+
+ def __enter__(self: S) -> S:
+ self._old.append(distutils.ccompiler.CCompiler.compile)
+ return self.install()
+
+ def __exit__(self, *args: Any) -> None:
+ distutils.ccompiler.CCompiler.compile = self._old.pop() # type: ignore[assignment]
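
Putting the helpers above together, a minimal `setup.py` sketch: `Pybind11Extension` applies the recommended flags, the custom `build_ext` auto-detects the highest supported C++ standard, and `ParallelCompile` opts in to multithreaded builds with mtime-based recompilation. The package and source names are illustrative:

```python
from setuptools import setup

from pybind11.setup_helpers import (
    ParallelCompile,
    Pybind11Extension,
    build_ext,
    naive_recompile,
)

# Honour NPY_NUM_BUILD_JOBS and skip objects whose source file is unchanged.
ParallelCompile("NPY_NUM_BUILD_JOBS", needs_recompile=naive_recompile).install()

setup(
    name="example",
    ext_modules=[Pybind11Extension("example", ["example.cpp"])],
    cmdclass={"build_ext": build_ext},
)
```
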
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindCatch.cmake b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindCatch.cmake
new file mode 100644
index 0000000..5d3fcbf
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindCatch.cmake
@@ -0,0 +1,76 @@
+# - Find the Catch test framework or download it (single header)
+#
+# This is a quick module for internal use. It assumes that Catch is not
+# REQUIRED and that a minimum version is provided (not EXACT). If
+# a suitable version isn't found locally, the single header file
+# will be downloaded and placed in the build dir: PROJECT_BINARY_DIR.
+#
+# This code sets the following variables:
+# CATCH_INCLUDE_DIR - path to catch.hpp
+# CATCH_VERSION - version number
+
+option(DOWNLOAD_CATCH "Download catch2 if not found")
+
+if(NOT Catch_FIND_VERSION)
+ message(FATAL_ERROR "A version number must be specified.")
+elseif(Catch_FIND_REQUIRED)
+ message(FATAL_ERROR "This module assumes Catch is not required.")
+elseif(Catch_FIND_VERSION_EXACT)
+ message(FATAL_ERROR "Exact version numbers are not supported, only minimum.")
+endif()
+
+# Extract the version number from catch.hpp
+function(_get_catch_version)
+ file(
+ STRINGS "${CATCH_INCLUDE_DIR}/catch.hpp" version_line
+ REGEX "Catch v.*"
+ LIMIT_COUNT 1)
+ if(version_line MATCHES "Catch v([0-9]+)\\.([0-9]+)\\.([0-9]+)")
+ set(CATCH_VERSION
+ "${CMAKE_MATCH_1}.${CMAKE_MATCH_2}.${CMAKE_MATCH_3}"
+ PARENT_SCOPE)
+ endif()
+endfunction()
+
+# Download the single-header version of Catch
+function(_download_catch version destination_dir)
+ message(STATUS "Downloading catch v${version}...")
+ set(url https://github.com/philsquared/Catch/releases/download/v${version}/catch.hpp)
+ file(
+ DOWNLOAD ${url} "${destination_dir}/catch.hpp"
+ STATUS status
+ LOG log)
+ list(GET status 0 error)
+ if(error)
+ string(REPLACE "\n" "\n " log " ${log}")
+ message(FATAL_ERROR "Could not download URL:\n" " ${url}\n" "Log:\n" "${log}")
+ endif()
+ set(CATCH_INCLUDE_DIR
+ "${destination_dir}"
+ CACHE INTERNAL "")
+endfunction()
+
+# Look for catch locally
+find_path(
+ CATCH_INCLUDE_DIR
+ NAMES catch.hpp
+ PATH_SUFFIXES catch2)
+if(CATCH_INCLUDE_DIR)
+ _get_catch_version()
+endif()
+
+# Download the header if it wasn't found or if it's outdated
+if(NOT CATCH_VERSION OR CATCH_VERSION VERSION_LESS ${Catch_FIND_VERSION})
+ if(DOWNLOAD_CATCH)
+ _download_catch(${Catch_FIND_VERSION} "${PROJECT_BINARY_DIR}/catch/")
+ _get_catch_version()
+ else()
+ set(CATCH_FOUND FALSE)
+ return()
+ endif()
+endif()
+
+add_library(Catch2::Catch2 IMPORTED INTERFACE)
+set_property(TARGET Catch2::Catch2 PROPERTY INTERFACE_INCLUDE_DIRECTORIES "${CATCH_INCLUDE_DIR}")
+
+set(CATCH_FOUND TRUE)
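
`_get_catch_version` above recovers the version from the `Catch vX.Y.Z` banner inside `catch.hpp`. The same probe in Python, run on an illustrative banner line:

```python
import re

banner = " *  Catch v2.13.10"  # illustrative line from a catch.hpp header
m = re.search(r"Catch v([0-9]+)\.([0-9]+)\.([0-9]+)", banner)
if m:
    assert ".".join(m.groups()) == "2.13.10"
```
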
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindEigen3.cmake b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindEigen3.cmake
new file mode 100644
index 0000000..83625d9
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindEigen3.cmake
@@ -0,0 +1,86 @@
+# - Try to find Eigen3 lib
+#
+# This module supports requiring a minimum version, e.g. you can do
+# find_package(Eigen3 3.1.2)
+# to require version 3.1.2 or newer of Eigen3.
+#
+# Once done this will define
+#
+# EIGEN3_FOUND - system has eigen lib with correct version
+# EIGEN3_INCLUDE_DIR - the eigen include directory
+# EIGEN3_VERSION - eigen version
+
+# Copyright (c) 2006, 2007 Montel Laurent, <montel@kde.org>
+# Copyright (c) 2008, 2009 Gael Guennebaud, <g.gael@free.fr>
+# Copyright (c) 2009 Benoit Jacob <jacob.benoit.1@gmail.com>
+# Redistribution and use is allowed according to the terms of the 2-clause BSD license.
+
+if(NOT Eigen3_FIND_VERSION)
+ if(NOT Eigen3_FIND_VERSION_MAJOR)
+ set(Eigen3_FIND_VERSION_MAJOR 2)
+ endif(NOT Eigen3_FIND_VERSION_MAJOR)
+ if(NOT Eigen3_FIND_VERSION_MINOR)
+ set(Eigen3_FIND_VERSION_MINOR 91)
+ endif(NOT Eigen3_FIND_VERSION_MINOR)
+ if(NOT Eigen3_FIND_VERSION_PATCH)
+ set(Eigen3_FIND_VERSION_PATCH 0)
+ endif(NOT Eigen3_FIND_VERSION_PATCH)
+
+ set(Eigen3_FIND_VERSION
+ "${Eigen3_FIND_VERSION_MAJOR}.${Eigen3_FIND_VERSION_MINOR}.${Eigen3_FIND_VERSION_PATCH}")
+endif(NOT Eigen3_FIND_VERSION)
+
+macro(_eigen3_check_version)
+ file(READ "${EIGEN3_INCLUDE_DIR}/Eigen/src/Core/util/Macros.h" _eigen3_version_header)
+
+ string(REGEX MATCH "define[ \t]+EIGEN_WORLD_VERSION[ \t]+([0-9]+)" _eigen3_world_version_match
+ "${_eigen3_version_header}")
+ set(EIGEN3_WORLD_VERSION "${CMAKE_MATCH_1}")
+ string(REGEX MATCH "define[ \t]+EIGEN_MAJOR_VERSION[ \t]+([0-9]+)" _eigen3_major_version_match
+ "${_eigen3_version_header}")
+ set(EIGEN3_MAJOR_VERSION "${CMAKE_MATCH_1}")
+ string(REGEX MATCH "define[ \t]+EIGEN_MINOR_VERSION[ \t]+([0-9]+)" _eigen3_minor_version_match
+ "${_eigen3_version_header}")
+ set(EIGEN3_MINOR_VERSION "${CMAKE_MATCH_1}")
+
+ set(EIGEN3_VERSION ${EIGEN3_WORLD_VERSION}.${EIGEN3_MAJOR_VERSION}.${EIGEN3_MINOR_VERSION})
+ if(${EIGEN3_VERSION} VERSION_LESS ${Eigen3_FIND_VERSION})
+ set(EIGEN3_VERSION_OK FALSE)
+ else(${EIGEN3_VERSION} VERSION_LESS ${Eigen3_FIND_VERSION})
+ set(EIGEN3_VERSION_OK TRUE)
+ endif(${EIGEN3_VERSION} VERSION_LESS ${Eigen3_FIND_VERSION})
+
+ if(NOT EIGEN3_VERSION_OK)
+
+ message(STATUS "Eigen3 version ${EIGEN3_VERSION} found in ${EIGEN3_INCLUDE_DIR}, "
+ "but at least version ${Eigen3_FIND_VERSION} is required")
+ endif(NOT EIGEN3_VERSION_OK)
+endmacro(_eigen3_check_version)
+
+if(EIGEN3_INCLUDE_DIR)
+
+ # in cache already
+ _eigen3_check_version()
+ set(EIGEN3_FOUND ${EIGEN3_VERSION_OK})
+
+else(EIGEN3_INCLUDE_DIR)
+ if(NOT DEFINED KDE4_INCLUDE_DIR)
+ set(KDE4_INCLUDE_DIR "")
+ endif()
+
+ find_path(
+ EIGEN3_INCLUDE_DIR
+ NAMES signature_of_eigen3_matrix_library
+ PATHS ${CMAKE_INSTALL_PREFIX}/include ${KDE4_INCLUDE_DIR}
+ PATH_SUFFIXES eigen3 eigen)
+
+ if(EIGEN3_INCLUDE_DIR)
+ _eigen3_check_version()
+ endif(EIGEN3_INCLUDE_DIR)
+
+ include(FindPackageHandleStandardArgs)
+ find_package_handle_standard_args(Eigen3 DEFAULT_MSG EIGEN3_INCLUDE_DIR EIGEN3_VERSION_OK)
+
+ mark_as_advanced(EIGEN3_INCLUDE_DIR)
+
+endif(EIGEN3_INCLUDE_DIR)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindPythonLibsNew.cmake b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindPythonLibsNew.cmake
new file mode 100644
index 0000000..ce558d4
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/FindPythonLibsNew.cmake
@@ -0,0 +1,287 @@
+# - Find python libraries
+# This module finds the libraries corresponding to the Python interpreter
+# FindPythonInterp provides.
+# This code sets the following variables:
+#
+# PYTHONLIBS_FOUND - have the Python libs been found
+# PYTHON_PREFIX - path to the Python installation
+# PYTHON_LIBRARIES - path to the python library
+# PYTHON_INCLUDE_DIRS - path to where Python.h is found
+# PYTHON_MODULE_EXTENSION - lib extension, e.g. '.so' or '.pyd'
+# PYTHON_MODULE_PREFIX - lib name prefix: usually an empty string
+# PYTHON_SITE_PACKAGES - path to installation site-packages
+# PYTHON_IS_DEBUG - whether the Python interpreter is a debug build
+#
+# Thanks to talljimbo for the patch adding the 'LDVERSION' config
+# variable usage.
+
+#=============================================================================
+# Copyright 2001-2009 Kitware, Inc.
+# Copyright 2012 Continuum Analytics, Inc.
+#
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# * Neither the names of Kitware, Inc., the Insight Software Consortium,
+# nor the names of their contributors may be used to endorse or promote
+# products derived from this software without specific prior written
+# permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#=============================================================================
+
+# Checking for the extension makes sure that `LibsNew` was found and not just `Libs`.
+if(PYTHONLIBS_FOUND AND PYTHON_MODULE_EXTENSION)
+ return()
+endif()
+
+if(PythonLibsNew_FIND_QUIETLY)
+ set(_pythonlibs_quiet QUIET)
+else()
+ set(_pythonlibs_quiet "")
+endif()
+
+if(PythonLibsNew_FIND_REQUIRED)
+ set(_pythonlibs_required REQUIRED)
+endif()
+
+# Check to see if the `python` command is present and from a virtual
+# environment, conda, or GHA activation - if it is, try to use that.
+
+if(NOT DEFINED PYTHON_EXECUTABLE)
+ if(DEFINED ENV{VIRTUAL_ENV})
+ find_program(
+ PYTHON_EXECUTABLE python
+ PATHS "$ENV{VIRTUAL_ENV}" "$ENV{VIRTUAL_ENV}/bin"
+ NO_DEFAULT_PATH)
+ elseif(DEFINED ENV{CONDA_PREFIX})
+ find_program(
+ PYTHON_EXECUTABLE python
+ PATHS "$ENV{CONDA_PREFIX}" "$ENV{CONDA_PREFIX}/bin"
+ NO_DEFAULT_PATH)
+ elseif(DEFINED ENV{pythonLocation})
+ find_program(
+ PYTHON_EXECUTABLE python
+ PATHS "$ENV{pythonLocation}" "$ENV{pythonLocation}/bin"
+ NO_DEFAULT_PATH)
+ endif()
+ if(NOT PYTHON_EXECUTABLE)
+ unset(PYTHON_EXECUTABLE)
+ endif()
+endif()
+
+# Use the Python interpreter to find the libs.
+if(NOT PythonLibsNew_FIND_VERSION)
+ set(PythonLibsNew_FIND_VERSION "3.6")
+endif()
+
+find_package(PythonInterp ${PythonLibsNew_FIND_VERSION} ${_pythonlibs_required}
+ ${_pythonlibs_quiet})
+
+if(NOT PYTHONINTERP_FOUND)
+ set(PYTHONLIBS_FOUND FALSE)
+ set(PythonLibsNew_FOUND FALSE)
+ return()
+endif()
+
+# According to https://stackoverflow.com/questions/646518/python-how-to-detect-debug-interpreter
+# testing whether sys has the gettotalrefcount function is a reliable, cross-platform
+# way to detect a CPython debug interpreter.
+#
+# The library suffix is from the config var LDVERSION sometimes, otherwise
+# VERSION. VERSION will typically be like "2.7" on unix, and "27" on windows.
+execute_process(
+ COMMAND
+ "${PYTHON_EXECUTABLE}" "-c" "
+import sys;import struct;
+import sysconfig as s
+USE_SYSCONFIG = sys.version_info >= (3, 10)
+if not USE_SYSCONFIG:
+ from distutils import sysconfig as ds
+print('.'.join(str(v) for v in sys.version_info));
+print(sys.prefix);
+if USE_SYSCONFIG:
+ scheme = s.get_default_scheme()
+ if scheme == 'posix_local':
+ # Debian's default scheme installs to /usr/local/ but we want to find headers in /usr/
+ scheme = 'posix_prefix'
+ print(s.get_path('platinclude', scheme))
+ print(s.get_path('platlib'))
+ print(s.get_config_var('EXT_SUFFIX') or s.get_config_var('SO'))
+else:
+ print(ds.get_python_inc(plat_specific=True));
+ print(ds.get_python_lib(plat_specific=True));
+ print(ds.get_config_var('EXT_SUFFIX') or ds.get_config_var('SO'));
+print(hasattr(sys, 'gettotalrefcount')+0);
+print(struct.calcsize('@P'));
+print(s.get_config_var('LDVERSION') or s.get_config_var('VERSION'));
+print(s.get_config_var('LIBDIR') or '');
+print(s.get_config_var('MULTIARCH') or '');
+"
+ RESULT_VARIABLE _PYTHON_SUCCESS
+ OUTPUT_VARIABLE _PYTHON_VALUES
+ ERROR_VARIABLE _PYTHON_ERROR_VALUE)
+
+if(NOT _PYTHON_SUCCESS MATCHES 0)
+ if(PythonLibsNew_FIND_REQUIRED)
+ message(FATAL_ERROR "Python config failure:\n${_PYTHON_ERROR_VALUE}")
+ endif()
+ set(PYTHONLIBS_FOUND FALSE)
+ set(PythonLibsNew_FOUND FALSE)
+ return()
+endif()
+
+option(
+ PYBIND11_PYTHONLIBS_OVERWRITE
+ "Overwrite cached values read from Python library (classic search). Turn off if cross-compiling and manually setting these values."
+ ON)
+# Can manually set values when cross-compiling
+macro(_PYBIND11_GET_IF_UNDEF lst index name)
+ if(PYBIND11_PYTHONLIBS_OVERWRITE OR NOT DEFINED "${name}")
+ list(GET "${lst}" "${index}" "${name}")
+ endif()
+endmacro()
+
+# Convert the process output into a list
+if(WIN32)
+ string(REGEX REPLACE "\\\\" "/" _PYTHON_VALUES ${_PYTHON_VALUES})
+endif()
+string(REGEX REPLACE ";" "\\\\;" _PYTHON_VALUES ${_PYTHON_VALUES})
+string(REGEX REPLACE "\n" ";" _PYTHON_VALUES ${_PYTHON_VALUES})
+_pybind11_get_if_undef(_PYTHON_VALUES 0 _PYTHON_VERSION_LIST)
+_pybind11_get_if_undef(_PYTHON_VALUES 1 PYTHON_PREFIX)
+_pybind11_get_if_undef(_PYTHON_VALUES 2 PYTHON_INCLUDE_DIR)
+_pybind11_get_if_undef(_PYTHON_VALUES 3 PYTHON_SITE_PACKAGES)
+_pybind11_get_if_undef(_PYTHON_VALUES 4 PYTHON_MODULE_EXTENSION)
+_pybind11_get_if_undef(_PYTHON_VALUES 5 PYTHON_IS_DEBUG)
+_pybind11_get_if_undef(_PYTHON_VALUES 6 PYTHON_SIZEOF_VOID_P)
+_pybind11_get_if_undef(_PYTHON_VALUES 7 PYTHON_LIBRARY_SUFFIX)
+_pybind11_get_if_undef(_PYTHON_VALUES 8 PYTHON_LIBDIR)
+_pybind11_get_if_undef(_PYTHON_VALUES 9 PYTHON_MULTIARCH)
+
+# Make sure the Python has the same pointer-size as the chosen compiler
+# Skip if CMAKE_SIZEOF_VOID_P is not defined
+# This should be skipped for (non-Apple) cross-compiles (like EMSCRIPTEN)
+if(NOT CMAKE_CROSSCOMPILING
+ AND CMAKE_SIZEOF_VOID_P
+ AND (NOT "${PYTHON_SIZEOF_VOID_P}" STREQUAL "${CMAKE_SIZEOF_VOID_P}"))
+ if(PythonLibsNew_FIND_REQUIRED)
+ math(EXPR _PYTHON_BITS "${PYTHON_SIZEOF_VOID_P} * 8")
+ math(EXPR _CMAKE_BITS "${CMAKE_SIZEOF_VOID_P} * 8")
+ message(FATAL_ERROR "Python config failure: Python is ${_PYTHON_BITS}-bit, "
+ "chosen compiler is ${_CMAKE_BITS}-bit")
+ endif()
+ set(PYTHONLIBS_FOUND FALSE)
+ set(PythonLibsNew_FOUND FALSE)
+ return()
+endif()
+
+# The built-in FindPython didn't always give the version numbers
+string(REGEX REPLACE "\\." ";" _PYTHON_VERSION_LIST ${_PYTHON_VERSION_LIST})
+list(GET _PYTHON_VERSION_LIST 0 PYTHON_VERSION_MAJOR)
+list(GET _PYTHON_VERSION_LIST 1 PYTHON_VERSION_MINOR)
+list(GET _PYTHON_VERSION_LIST 2 PYTHON_VERSION_PATCH)
+set(PYTHON_VERSION "${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}.${PYTHON_VERSION_PATCH}")
+
+# Make sure all directory separators are '/'
+string(REGEX REPLACE "\\\\" "/" PYTHON_PREFIX "${PYTHON_PREFIX}")
+string(REGEX REPLACE "\\\\" "/" PYTHON_INCLUDE_DIR "${PYTHON_INCLUDE_DIR}")
+string(REGEX REPLACE "\\\\" "/" PYTHON_SITE_PACKAGES "${PYTHON_SITE_PACKAGES}")
+
+if(DEFINED PYTHON_LIBRARY)
+ # Don't write to PYTHON_LIBRARY if it's already set
+elseif(CMAKE_HOST_WIN32)
+ set(PYTHON_LIBRARY "${PYTHON_PREFIX}/libs/python${PYTHON_LIBRARY_SUFFIX}.lib")
+
+ # when run in a venv, PYTHON_PREFIX points to it. But the libraries remain in the
+ # original python installation. They may be found relative to PYTHON_INCLUDE_DIR.
+ if(NOT EXISTS "${PYTHON_LIBRARY}")
+ get_filename_component(_PYTHON_ROOT ${PYTHON_INCLUDE_DIR} DIRECTORY)
+ set(PYTHON_LIBRARY "${_PYTHON_ROOT}/libs/python${PYTHON_LIBRARY_SUFFIX}.lib")
+ endif()
+
+ # if we are in MSYS & MINGW, and we didn't find windows python lib, look for system python lib
+ if(DEFINED ENV{MSYSTEM}
+ AND MINGW
+ AND NOT EXISTS "${PYTHON_LIBRARY}")
+ if(PYTHON_MULTIARCH)
+ set(_PYTHON_LIBS_SEARCH "${PYTHON_LIBDIR}/${PYTHON_MULTIARCH}" "${PYTHON_LIBDIR}")
+ else()
+ set(_PYTHON_LIBS_SEARCH "${PYTHON_LIBDIR}")
+ endif()
+ unset(PYTHON_LIBRARY)
+ find_library(
+ PYTHON_LIBRARY
+ NAMES "python${PYTHON_LIBRARY_SUFFIX}"
+ PATHS ${_PYTHON_LIBS_SEARCH}
+ NO_DEFAULT_PATH)
+ endif()
+
+ # raise an error if the python libs are still not found.
+ if(NOT EXISTS "${PYTHON_LIBRARY}")
+ message(FATAL_ERROR "Python libraries not found")
+ endif()
+
+else()
+ if(PYTHON_MULTIARCH)
+ set(_PYTHON_LIBS_SEARCH "${PYTHON_LIBDIR}/${PYTHON_MULTIARCH}" "${PYTHON_LIBDIR}")
+ else()
+ set(_PYTHON_LIBS_SEARCH "${PYTHON_LIBDIR}")
+ endif()
+ #message(STATUS "Searching for Python libs in ${_PYTHON_LIBS_SEARCH}")
+ # Probably this needs to be more involved. It would be nice if the config
+ # information the python interpreter itself gave us were more complete.
+ find_library(
+ PYTHON_LIBRARY
+ NAMES "python${PYTHON_LIBRARY_SUFFIX}"
+ PATHS ${_PYTHON_LIBS_SEARCH}
+ NO_DEFAULT_PATH)
+
+ # If all else fails, just set the name/version and let the linker figure out the path.
+ if(NOT PYTHON_LIBRARY)
+ set(PYTHON_LIBRARY python${PYTHON_LIBRARY_SUFFIX})
+ endif()
+endif()
+
+mark_as_advanced(PYTHON_LIBRARY PYTHON_INCLUDE_DIR)
+
+# We use PYTHON_INCLUDE_DIR, PYTHON_LIBRARY and PYTHON_DEBUG_LIBRARY for the
+# cache entries because they are meant to specify the location of a single
+# library. We now set the variables listed by the documentation for this
+# module.
+set(PYTHON_INCLUDE_DIRS "${PYTHON_INCLUDE_DIR}")
+set(PYTHON_LIBRARIES "${PYTHON_LIBRARY}")
+if(NOT PYTHON_DEBUG_LIBRARY)
+ set(PYTHON_DEBUG_LIBRARY "")
+endif()
+set(PYTHON_DEBUG_LIBRARIES "${PYTHON_DEBUG_LIBRARY}")
+
+find_package_message(PYTHON "Found PythonLibs: ${PYTHON_LIBRARIES}"
+ "${PYTHON_EXECUTABLE}${PYTHON_VERSION_STRING}")
+
+set(PYTHONLIBS_FOUND TRUE)
+set(PythonLibsNew_FOUND TRUE)
+
+if(NOT PYTHON_MODULE_PREFIX)
+ set(PYTHON_MODULE_PREFIX "")
+endif()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/JoinPaths.cmake b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/JoinPaths.cmake
new file mode 100644
index 0000000..c68d91b
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/JoinPaths.cmake
@@ -0,0 +1,23 @@
+# This module provides function for joining paths
+# known from most languages
+#
+# SPDX-License-Identifier: (MIT OR CC0-1.0)
+# Copyright 2020 Jan Tojnar
+# https://github.com/jtojnar/cmake-snips
+#
+# Modelled after Python’s os.path.join
+# https://docs.python.org/3.7/library/os.path.html#os.path.join
+# Windows not supported
+function(join_paths joined_path first_path_segment)
+ set(temp_path "${first_path_segment}")
+ foreach(current_segment IN LISTS ARGN)
+ if(NOT ("${current_segment}" STREQUAL ""))
+ if(IS_ABSOLUTE "${current_segment}")
+ set(temp_path "${current_segment}")
+ else()
+ set(temp_path "${temp_path}/${current_segment}")
+ endif()
+ endif()
+ endforeach()
+ set(${joined_path} "${temp_path}" PARENT_SCOPE)
+endfunction()
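
`join_paths` follows `os.path.join` semantics: an absolute segment discards everything before it, which is what lets an absolute `CMAKE_INSTALL_INCLUDEDIR` bypass `${prefix}` in the generated `pybind11.pc`. The same behaviour shown in Python:

```python
import posixpath

# A relative includedir lands under the prefix placeholder...
assert posixpath.join("${prefix}", "include") == "${prefix}/include"
# ...while an absolute includedir is taken verbatim.
assert posixpath.join("${prefix}", "/usr/include") == "/usr/include"
```
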
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/check-style.sh b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/check-style.sh
new file mode 100644
index 0000000..6d83252
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/check-style.sh
@@ -0,0 +1,44 @@
+#!/bin/bash
+#
+# Script to check include/test code for common pybind11 code style errors.
+#
+# This script currently checks for
+#
+# 1. missing space between keyword and parenthesis, e.g.: for(, if(, while(
+# 2. Missing space between right parenthesis and brace, e.g. 'for (...){'
+# 3. opening brace on its own line. It should always be on the same line as the
+# if/while/for/do statement.
+#
+# Invoke as: tools/check-style.sh
+#
+
+check_style_errors=0
+IFS=$'\n'
+
+
+found="$(grep '\<\(if\|for\|while\|catch\)(\|){' "$@" -rn --color=always)"
+if [ -n "$found" ]; then
+ echo -e '\033[31;01mError: found the following coding style problems:\033[0m'
+ check_style_errors=1
+ echo "${found//^/ /}"
+fi
+
+found="$(awk '
+function prefix(filename, lineno) {
+ return " \033[35m" filename "\033[36m:\033[32m" lineno "\033[36m:\033[0m"
+}
+function mark(pattern, string) { sub(pattern, "\033[01;31m&\033[0m", string); return string }
+last && /^\s*{/ {
+ print prefix(FILENAME, FNR-1) mark("\\)\\s*$", last)
+ print prefix(FILENAME, FNR) mark("^\\s*{", $0)
+ last=""
+}
+{ last = /(if|for|while|catch|switch)\s*\(.*\)\s*$/ ? $0 : "" }
+' "$(find include -type f)" "$@")"
+if [ -n "$found" ]; then
+ check_style_errors=1
+ echo -e '\033[31;01mError: braces should occur on the same line as the if/while/.. statement. Found issues in the following files:\033[0m'
+ echo "$found"
+fi
+
+exit $check_style_errors
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/cmake_uninstall.cmake.in b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/cmake_uninstall.cmake.in
new file mode 100644
index 0000000..1e5d2bb
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/cmake_uninstall.cmake.in
@@ -0,0 +1,23 @@
+# Source: https://gitlab.kitware.com/cmake/community/-/wikis/FAQ#can-i-do-make-uninstall-with-cmake
+
+if(NOT EXISTS "@CMAKE_BINARY_DIR@/install_manifest.txt")
+ message(FATAL_ERROR "Cannot find install manifest: @CMAKE_BINARY_DIR@/install_manifest.txt")
+endif()
+
+file(READ "@CMAKE_BINARY_DIR@/install_manifest.txt" files)
+string(REGEX REPLACE "\n" ";" files "${files}")
+foreach(file ${files})
+ message(STATUS "Uninstalling $ENV{DESTDIR}${file}")
+ if(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
+ exec_program(
+ "@CMAKE_COMMAND@" ARGS
+ "-E remove \"$ENV{DESTDIR}${file}\""
+ OUTPUT_VARIABLE rm_out
+ RETURN_VALUE rm_retval)
+ if(NOT "${rm_retval}" STREQUAL 0)
+ message(FATAL_ERROR "Problem when removing $ENV{DESTDIR}${file}")
+ endif()
+ else(IS_SYMLINK "$ENV{DESTDIR}${file}" OR EXISTS "$ENV{DESTDIR}${file}")
+ message(STATUS "File $ENV{DESTDIR}${file} does not exist.")
+ endif()
+endforeach()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/codespell_ignore_lines_from_errors.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/codespell_ignore_lines_from_errors.py
new file mode 100644
index 0000000..4ec9add
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/codespell_ignore_lines_from_errors.py
@@ -0,0 +1,39 @@
+"""Simple script for rebuilding .codespell-ignore-lines
+
+Usage:
+
+cat < /dev/null > .codespell-ignore-lines
+pre-commit run --all-files codespell >& /tmp/codespell_errors.txt
+python3 tools/codespell_ignore_lines_from_errors.py /tmp/codespell_errors.txt > .codespell-ignore-lines
+
+git diff to review changes, then commit, push.
+"""
+
+import sys
+from typing import List
+
+
+def run(args: List[str]) -> None:
+ assert len(args) == 1, "codespell_errors.txt"
+ cache = {}
+ done = set()
+ with open(args[0]) as f:
+ lines = f.read().splitlines()
+
+ for line in sorted(lines):
+ i = line.find(" ==> ")
+ if i > 0:
+ flds = line[:i].split(":")
+ if len(flds) >= 2:
+ filename, line_num = flds[:2]
+ if filename not in cache:
+ with open(filename) as f:
+ cache[filename] = f.read().splitlines()
+ supp = cache[filename][int(line_num) - 1]
+ if supp not in done:
+ print(supp)
+ done.add(supp)
+
+
+if __name__ == "__main__":
+ run(args=sys.argv[1:])
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/libsize.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/libsize.py
new file mode 100644
index 0000000..1ac9afb
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/libsize.py
@@ -0,0 +1,36 @@
+import os
+import sys
+
+# Internal build script for generating debugging test .so size.
+# Usage:
+# python libsize.py file.so save.txt -- displays the size of file.so and, if save.txt exists, compares it to the
+# size in it, then overwrites save.txt with the new size for future runs.
+
+if len(sys.argv) != 3:
+ sys.exit("Invalid arguments: usage: python libsize.py file.so save.txt")
+
+lib = sys.argv[1]
+save = sys.argv[2]
+
+if not os.path.exists(lib):
+ sys.exit(f"Error: requested file ({lib}) does not exist")
+
+libsize = os.path.getsize(lib)
+
+print("------", os.path.basename(lib), "file size:", libsize, end="")
+
+if os.path.exists(save):
+ with open(save) as sf:
+ oldsize = int(sf.readline())
+
+ if oldsize > 0:
+ change = libsize - oldsize
+ if change == 0:
+ print(" (no change)")
+ else:
+ print(f" (change of {change:+} bytes = {change / oldsize:+.2%})")
+else:
+ print()
+
+with open(save, "w") as sf:
+ sf.write(str(libsize))
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/make_changelog.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/make_changelog.py
new file mode 100644
index 0000000..b5bd832
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/make_changelog.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python3
+
+import re
+
+import ghapi.all
+from rich import print
+from rich.syntax import Syntax
+
+ENTRY = re.compile(
+ r"""
+ Suggested \s changelog \s entry:
+ .*
+ ```rst
+ \s*
+ (.*?)
+ \s*
+ ```
+""",
+ re.DOTALL | re.VERBOSE,
+)
+
+print()
+
+
+api = ghapi.all.GhApi(owner="pybind", repo="pybind11")
+
+issues_pages = ghapi.page.paged(
+ api.issues.list_for_repo, labels="needs changelog", state="closed"
+)
+issues = (issue for page in issues_pages for issue in page)
+missing = []
+
+for issue in issues:
+ changelog = ENTRY.findall(issue.body or "")
+ if not changelog or not changelog[0]:
+ missing.append(issue)
+ else:
+ (msg,) = changelog
+ if not msg.startswith("* "):
+ msg = "* " + msg
+ if not msg.endswith("."):
+ msg += "."
+
+ msg += f"\n `#{issue.number} <{issue.html_url}>`_"
+
+ print(Syntax(msg, "rst", theme="ansi_light", word_wrap=True))
+ print()
+
+if missing:
+ print()
+ print("[blue]" + "-" * 30)
+ print()
+
+ for issue in missing:
+ print(f"[red bold]Missing:[/red bold][red] {issue.title}")
+ print(f"[red] {issue.html_url}\n")
+
+ print("[bold]Template:\n")
+ msg = "## Suggested changelog entry:\n\n```rst\n\n```"
+ print(Syntax(msg, "md", theme="ansi_light"))
+
+print()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11.pc.in b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11.pc.in
new file mode 100644
index 0000000..402f0b3
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11.pc.in
@@ -0,0 +1,7 @@
+prefix=@prefix_for_pc_file@
+includedir=@includedir_for_pc_file@
+
+Name: @PROJECT_NAME@
+Description: Seamless operability between C++11 and Python
+Version: @PROJECT_VERSION@
+Cflags: -I${includedir}
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Common.cmake b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Common.cmake
new file mode 100644
index 0000000..308d1b7
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Common.cmake
@@ -0,0 +1,405 @@
+#[======================================================[.rst
+
+Adds the following targets::
+
+ pybind11::pybind11 - link to headers and pybind11
+ pybind11::module - Adds module links
+ pybind11::embed - Adds embed links
+ pybind11::lto - Link time optimizations (only if CMAKE_INTERPROCEDURAL_OPTIMIZATION is not set)
+ pybind11::thin_lto - Link time optimizations (only if CMAKE_INTERPROCEDURAL_OPTIMIZATION is not set)
+ pybind11::python_link_helper - Adds link to Python libraries
+ pybind11::windows_extras - MSVC bigobj and mp for building multithreaded
+ pybind11::opt_size - avoid optimizations that increase code size
+
+Adds the following functions::
+
+ pybind11_strip(target) - strip target after building on linux/macOS
+ pybind11_find_import(module) - See if a module is installed.
+
+#]======================================================]
+
+# CMake 3.10 has an include_guard command, but we can't use that yet
+# include_guard(global) (pre-CMake 3.10)
+if(TARGET pybind11::pybind11)
+ return()
+endif()
+
+# If we are in subdirectory mode, all IMPORTED targets must be GLOBAL. If we
+# are in CONFIG mode, they should be "normal" targets instead.
+# In CMake 3.11+ you can promote a target to global after you create it,
+# which might be simpler than this check.
+get_property(
+ is_config
+ TARGET pybind11::headers
+ PROPERTY IMPORTED)
+if(NOT is_config)
+ set(optional_global GLOBAL)
+endif()
+
+# If not run in Python mode, we still would like this to at least
+# include pybind11's include directory:
+set(pybind11_INCLUDE_DIRS
+ "${pybind11_INCLUDE_DIR}"
+ CACHE INTERNAL "Include directory for pybind11 (Python not requested)")
+
+# --------------------- Shared targets ----------------------------
+
+# Build an interface library target:
+add_library(pybind11::pybind11 IMPORTED INTERFACE ${optional_global})
+set_property(
+ TARGET pybind11::pybind11
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES pybind11::headers)
+
+# Build a module target:
+add_library(pybind11::module IMPORTED INTERFACE ${optional_global})
+set_property(
+ TARGET pybind11::module
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES pybind11::pybind11)
+
+# Build an embed library target:
+add_library(pybind11::embed IMPORTED INTERFACE ${optional_global})
+set_property(
+ TARGET pybind11::embed
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES pybind11::pybind11)
+
+# --------------------------- link helper ---------------------------
+
+add_library(pybind11::python_link_helper IMPORTED INTERFACE ${optional_global})
+
+if(CMAKE_VERSION VERSION_LESS 3.13)
+ # In CMake 3.11+, you can set INTERFACE properties via the normal methods, and
+ # this would be simpler.
+ set_property(
+ TARGET pybind11::python_link_helper
+ APPEND
+    PROPERTY INTERFACE_LINK_LIBRARIES "$<$<PLATFORM_ID:Darwin>:-undefined dynamic_lookup>")
+else()
+ # link_options was added in 3.13+
+ # This is safer, because you are ensured the deduplication pass in CMake will not consider
+ # these separate and remove one but not the other.
+ set_property(
+ TARGET pybind11::python_link_helper
+ APPEND
+    PROPERTY INTERFACE_LINK_OPTIONS "$<$<PLATFORM_ID:Darwin>:LINKER:-undefined,dynamic_lookup>")
+endif()
+
+# ------------------------ Windows extras -------------------------
+
+add_library(pybind11::windows_extras IMPORTED INTERFACE ${optional_global})
+
+if(MSVC) # That's also clang-cl
+ # /bigobj is needed for bigger binding projects due to the limit to 64k
+ # addressable sections
+ set_property(
+ TARGET pybind11::windows_extras
+ APPEND
+    PROPERTY INTERFACE_COMPILE_OPTIONS $<$<COMPILE_LANGUAGE:CXX>:/bigobj>)
+
+ # /MP enables multithreaded builds (relevant when there are many files) for MSVC
+ if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC") # no Clang no Intel
+ if(CMAKE_VERSION VERSION_LESS 3.11)
+ set_property(
+ TARGET pybind11::windows_extras
+ APPEND
+        PROPERTY INTERFACE_COMPILE_OPTIONS $<$<NOT:$<CONFIG:Debug>>:/MP>)
+ else()
+ # Only set these options for C++ files. This is important so that, for
+ # instance, projects that include other types of source files like CUDA
+ # .cu files don't get these options propagated to nvcc since that would
+ # cause the build to fail.
+ set_property(
+ TARGET pybind11::windows_extras
+ APPEND
+ PROPERTY INTERFACE_COMPILE_OPTIONS
+                 $<$<NOT:$<CONFIG:Debug>>:$<$<COMPILE_LANGUAGE:CXX>:/MP>>)
+ endif()
+ endif()
+endif()
+
+# ----------------------- Optimize binary size --------------------------
+
+add_library(pybind11::opt_size IMPORTED INTERFACE ${optional_global})
+
+if(MSVC)
+ set(PYBIND11_OPT_SIZE /Os)
+else()
+ set(PYBIND11_OPT_SIZE -Os)
+endif()
+
+set_property(
+ TARGET pybind11::opt_size
+ APPEND
+  PROPERTY INTERFACE_COMPILE_OPTIONS $<$<CONFIG:MinSizeRel>:${PYBIND11_OPT_SIZE}>
+           $<$<CONFIG:Release>:${PYBIND11_OPT_SIZE}>
+           $<$<CONFIG:RelWithDebInfo>:${PYBIND11_OPT_SIZE}>)
+
+# ----------------------- Legacy option --------------------------
+
+# Warn or error if old variable name used
+if(PYBIND11_CPP_STANDARD)
+ string(REGEX MATCH [[..$]] VAL "${PYBIND11_CPP_STANDARD}")
+ if(CMAKE_CXX_STANDARD)
+ if(NOT CMAKE_CXX_STANDARD STREQUAL VAL)
+ message(WARNING "CMAKE_CXX_STANDARD=${CMAKE_CXX_STANDARD} does not match "
+ "PYBIND11_CPP_STANDARD=${PYBIND11_CPP_STANDARD}, "
+ "please remove PYBIND11_CPP_STANDARD from your cache")
+ endif()
+ else()
+ set(supported_standards 11 14 17 20)
+ if("${VAL}" IN_LIST supported_standards)
+ message(WARNING "USE -DCMAKE_CXX_STANDARD=${VAL} instead of PYBIND11_CPP_STANDARD")
+ set(CMAKE_CXX_STANDARD
+ ${VAL}
+ CACHE STRING "From PYBIND11_CPP_STANDARD")
+ else()
+ message(FATAL_ERROR "PYBIND11_CPP_STANDARD should be replaced with CMAKE_CXX_STANDARD "
+ "(last two chars: ${VAL} not understood as a valid CXX std)")
+ endif()
+ endif()
+endif()
+
+# --------------------- Python specifics -------------------------
+
+# CMake 3.27 removes the classic FindPythonInterp if CMP0148 is NEW
+if(CMAKE_VERSION VERSION_LESS "3.27")
+ set(_pybind11_missing_old_python "OLD")
+else()
+ cmake_policy(GET CMP0148 _pybind11_missing_old_python)
+endif()
+
+# Check to see which Python mode we are in, new, old, or no python
+if(PYBIND11_NOPYTHON)
+ set(_pybind11_nopython ON)
+elseif(
+ _pybind11_missing_old_python STREQUAL "NEW"
+ OR PYBIND11_FINDPYTHON
+ OR Python_FOUND
+ OR Python2_FOUND
+ OR Python3_FOUND)
+ # New mode
+ include("${CMAKE_CURRENT_LIST_DIR}/pybind11NewTools.cmake")
+
+else()
+
+ # Classic mode
+ include("${CMAKE_CURRENT_LIST_DIR}/pybind11Tools.cmake")
+
+endif()
+
+# --------------------- pybind11_find_import -------------------------------
+
+if(NOT _pybind11_nopython)
+ # Check to see if modules are importable. Use REQUIRED to force an error if
+ # one of the modules is not found. _FOUND will be set if the
+ # package was found (underscores replace dashes if present). QUIET will hide
+ # the found message, and VERSION will require a minimum version. A successful
+ # find will cache the result.
+ function(pybind11_find_import PYPI_NAME)
+ # CMake variables need underscores (PyPI doesn't care)
+ string(REPLACE "-" "_" NORM_PYPI_NAME "${PYPI_NAME}")
+
+ # Return if found previously
+ if(${NORM_PYPI_NAME}_FOUND)
+ return()
+ endif()
+
+ set(options "REQUIRED;QUIET")
+ set(oneValueArgs "VERSION")
+ cmake_parse_arguments(ARG "${options}" "${oneValueArgs}" "" ${ARGN})
+
+ if(ARG_REQUIRED)
+ set(status_level FATAL_ERROR)
+ else()
+ set(status_level WARNING)
+ endif()
+
+ execute_process(
+ COMMAND
+ ${${_Python}_EXECUTABLE} -c
+ "from pkg_resources import get_distribution; print(get_distribution('${PYPI_NAME}').version)"
+ RESULT_VARIABLE RESULT_PRESENT
+ OUTPUT_VARIABLE PKG_VERSION
+ ERROR_QUIET)
+
+ string(STRIP "${PKG_VERSION}" PKG_VERSION)
+
+ # If a result is present, this failed
+ if(RESULT_PRESENT)
+ set(${NORM_PYPI_NAME}_FOUND
+ ${NORM_PYPI_NAME}-NOTFOUND
+ CACHE INTERNAL "")
+ # Always warn or error
+ message(
+ ${status_level}
+ "Missing: ${PYPI_NAME} ${ARG_VERSION}\nTry: ${${_Python}_EXECUTABLE} -m pip install ${PYPI_NAME}"
+ )
+ else()
+ if(ARG_VERSION AND PKG_VERSION VERSION_LESS ARG_VERSION)
+ message(
+ ${status_level}
+ "Version incorrect: ${PYPI_NAME} ${PKG_VERSION} found, ${ARG_VERSION} required - try upgrading"
+ )
+    else()
+      # We have successfully found a good version; cache it so later calls return early.
+ set(${NORM_PYPI_NAME}_FOUND
+ YES
+ CACHE INTERNAL "")
+ set(${NORM_PYPI_NAME}_VERSION
+ ${PKG_VERSION}
+ CACHE INTERNAL "")
+ endif()
+ if(NOT ARG_QUIET)
+ message(STATUS "Found ${PYPI_NAME} ${PKG_VERSION}")
+ endif()
+    endif()
+ endfunction()
+endif()
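
`pybind11_find_import` shells out to the interpreter with the `pkg_resources` one-liner above and treats a non-zero exit status as "not found". The equivalent standalone check (note `pkg_resources` ships with setuptools and is deprecated in newer releases; `importlib.metadata` is the modern route, but this mirrors the CMake function as written):

```python
from pkg_resources import get_distribution

try:
    print(get_distribution("numpy").version)  # package name is illustrative
except Exception:
    raise SystemExit(1)  # a non-zero exit is what CMake reports as missing
```
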
+
+# --------------------- LTO -------------------------------
+
+include(CheckCXXCompilerFlag)
+
+# Checks whether the given CXX/linker flags can compile and link a cxx file.
+# cxxflags and linkerflags are lists of flags to use. The result variable is a
+# unique variable name for each set of flags: the compilation result will be
+# cached base on the result variable. If the flags work, sets them in
+# cxxflags_out/linkerflags_out internal cache variables (in addition to
+# ${result}).
+function(_pybind11_return_if_cxx_and_linker_flags_work result cxxflags linkerflags cxxflags_out
+ linkerflags_out)
+ set(CMAKE_REQUIRED_LIBRARIES ${linkerflags})
+ check_cxx_compiler_flag("${cxxflags}" ${result})
+ if(${result})
+ set(${cxxflags_out}
+ "${cxxflags}"
+ PARENT_SCOPE)
+ set(${linkerflags_out}
+ "${linkerflags}"
+ PARENT_SCOPE)
+ endif()
+endfunction()
+
+function(_pybind11_generate_lto target prefer_thin_lto)
+ if(MINGW)
+ message(STATUS "${target} disabled (problems with undefined symbols for MinGW for now)")
+ return()
+ endif()
+
+ if(CMAKE_CXX_COMPILER_ID MATCHES "GNU|Clang")
+ set(cxx_append "")
+ set(linker_append "")
+ if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" AND NOT APPLE)
+ # Clang Gold plugin does not support -Os; append -O3 to MinSizeRel builds to override it
+      set(linker_append ";$<$<CONFIG:MinSizeRel>:-O3>")
+ elseif(CMAKE_CXX_COMPILER_ID MATCHES "GNU" AND NOT MINGW)
+ set(cxx_append ";-fno-fat-lto-objects")
+ endif()
+
+ if(CMAKE_SYSTEM_PROCESSOR MATCHES "ppc64le" OR CMAKE_SYSTEM_PROCESSOR MATCHES "mips64")
+ set(NO_FLTO_ARCH TRUE)
+ else()
+ set(NO_FLTO_ARCH FALSE)
+ endif()
+
+ if(CMAKE_CXX_COMPILER_ID MATCHES "Clang"
+ AND prefer_thin_lto
+ AND NOT NO_FLTO_ARCH)
+ _pybind11_return_if_cxx_and_linker_flags_work(
+ HAS_FLTO_THIN "-flto=thin${cxx_append}" "-flto=thin${linker_append}"
+ PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS)
+ endif()
+
+ if(NOT HAS_FLTO_THIN AND NOT NO_FLTO_ARCH)
+ _pybind11_return_if_cxx_and_linker_flags_work(
+ HAS_FLTO "-flto${cxx_append}" "-flto${linker_append}" PYBIND11_LTO_CXX_FLAGS
+ PYBIND11_LTO_LINKER_FLAGS)
+ endif()
+ elseif(CMAKE_CXX_COMPILER_ID MATCHES "IntelLLVM")
+ # IntelLLVM equivalent to LTO is called IPO; also IntelLLVM is WIN32/UNIX
+ # WARNING/HELP WANTED: This block of code is currently not covered by pybind11 GitHub Actions!
+ if(WIN32)
+ _pybind11_return_if_cxx_and_linker_flags_work(
+ HAS_INTEL_IPO "-Qipo" "-Qipo" PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS)
+ else()
+ _pybind11_return_if_cxx_and_linker_flags_work(
+ HAS_INTEL_IPO "-ipo" "-ipo" PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS)
+ endif()
+ elseif(CMAKE_CXX_COMPILER_ID MATCHES "Intel")
+ # Intel equivalent to LTO is called IPO
+ _pybind11_return_if_cxx_and_linker_flags_work(HAS_INTEL_IPO "-ipo" "-ipo"
+ PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS)
+ elseif(MSVC)
+ # cmake only interprets libraries as linker flags when they start with a - (otherwise it
+ # converts /LTCG to \LTCG as if it was a Windows path). Luckily MSVC supports passing flags
+ # with - instead of /, even if it is a bit non-standard:
+ _pybind11_return_if_cxx_and_linker_flags_work(HAS_MSVC_GL_LTCG "/GL" "-LTCG"
+ PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS)
+ endif()
+
+ # Enable LTO flags if found, except for Debug builds
+ if(PYBIND11_LTO_CXX_FLAGS)
+ # CONFIG takes multiple values in CMake 3.19+, until then we have to use OR
+    set(is_debug "$<OR:$<CONFIG:Debug>,$<CONFIG:RelWithDebInfo>>")
+    set(not_debug "$<NOT:${is_debug}>")
+    set(cxx_lang "$<COMPILE_LANGUAGE:CXX>")
+    if(MSVC AND CMAKE_VERSION VERSION_LESS 3.11)
+      set(genex "${not_debug}")
+    else()
+      set(genex "$<AND:${not_debug},${cxx_lang}>")
+ endif()
+ set_property(
+ TARGET ${target}
+ APPEND
+ PROPERTY INTERFACE_COMPILE_OPTIONS "$<${genex}:${PYBIND11_LTO_CXX_FLAGS}>")
+ if(CMAKE_PROJECT_NAME STREQUAL "pybind11")
+ message(STATUS "${target} enabled")
+ endif()
+ else()
+ if(CMAKE_PROJECT_NAME STREQUAL "pybind11")
+ message(STATUS "${target} disabled (not supported by the compiler and/or linker)")
+ endif()
+ endif()
+
+ if(PYBIND11_LTO_LINKER_FLAGS)
+ if(CMAKE_VERSION VERSION_LESS 3.11)
+ set_property(
+ TARGET ${target}
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES "$<${not_debug}:${PYBIND11_LTO_LINKER_FLAGS}>")
+ else()
+ set_property(
+ TARGET ${target}
+ APPEND
+ PROPERTY INTERFACE_LINK_OPTIONS "$<${not_debug}:${PYBIND11_LTO_LINKER_FLAGS}>")
+ endif()
+ endif()
+endfunction()
+
+if(NOT DEFINED CMAKE_INTERPROCEDURAL_OPTIMIZATION)
+ add_library(pybind11::lto IMPORTED INTERFACE ${optional_global})
+ _pybind11_generate_lto(pybind11::lto FALSE)
+
+ add_library(pybind11::thin_lto IMPORTED INTERFACE ${optional_global})
+ _pybind11_generate_lto(pybind11::thin_lto TRUE)
+endif()
+
+# ---------------------- pybind11_strip -----------------------------
+
+function(pybind11_strip target_name)
+ # Strip unnecessary sections of the binary on Linux/macOS
+ if(CMAKE_STRIP)
+ if(APPLE)
+ set(x_opt -x)
+ endif()
+
+ add_custom_command(
+ TARGET ${target_name}
+ POST_BUILD
+      COMMAND ${CMAKE_STRIP} ${x_opt} $<TARGET_FILE:${target_name}>)
+ endif()
+endfunction()
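+
+# Illustrative use (a sketch; target and source names are hypothetical), for
+# targets built by hand rather than through pybind11_add_module:
+#   add_library(mymodule MODULE src/mymodule.cpp)
+#   target_link_libraries(mymodule PRIVATE pybind11::module)
+#   pybind11_strip(mymodule)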
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Config.cmake.in b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Config.cmake.in
new file mode 100644
index 0000000..5734f43
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Config.cmake.in
@@ -0,0 +1,233 @@
+#[=============================================================================[.rst:
+
+pybind11Config.cmake
+####################
+
+Exported variables
+==================
+
+This module sets the following variables in your project:
+
+``pybind11_FOUND``
+ true if pybind11 and all required components found on the system
+``pybind11_VERSION``
+ pybind11 version in format Major.Minor.Release
+``pybind11_VERSION_TYPE``
+ pybind11 version type (``dev*`` or empty for a release)
+``pybind11_INCLUDE_DIRS``
+ Directories where pybind11 and python headers are located.
+``pybind11_INCLUDE_DIR``
+ Directory where pybind11 headers are located.
+``pybind11_DEFINITIONS``
+ Definitions necessary to use pybind11, namely USING_pybind11.
+``pybind11_LIBRARIES``
+ Compile flags and python libraries (as needed) to link against.
+``pybind11_LIBRARY``
+ Empty.
+
+Available components: None
+
+
+Exported targets
+================
+
+If pybind11 is found, this module defines the following ``IMPORTED``
+interface library targets:
+
+``pybind11::module``
+ for extension modules.
+``pybind11::embed``
+ for embedding the Python interpreter.
+
+Python headers, libraries (as needed by platform), and the C++ standard
+are attached to the target.
+
+Advanced targets are also supplied - these are primarily for users building
+complex applications, and they are available in all modes:
+
+``pybind11::headers``
+ Just the pybind11 headers and minimum compile requirements.
+``pybind11::pybind11``
+ Python headers too.
+``pybind11::python_link_helper``
+ Just the "linking" part of ``pybind11:module``, for CMake < 3.15.
+``pybind11::thin_lto``
+ An alternative to ``INTERPROCEDURAL_OPTIMIZATION``.
+``pybind11::lto``
+ An alternative to ``INTERPROCEDURAL_OPTIMIZATION`` (also avoids thin LTO on clang).
+``pybind11::windows_extras``
+ Adds bigobj and mp for MSVC.
+
+Modes
+=====
+
+There are two modes provided: classic, which is built on the old Python
+discovery packages in CMake, and the new FindPython mode, which uses FindPython
+from 3.12+ forward (3.15+ _highly_ recommended). If you set the minimum or
+maximum version of CMake to 3.27+, then FindPython is the default (since
+FindPythonInterp/FindPythonLibs were removed via policy `CMP0148`).
+
+New FindPython mode
+^^^^^^^^^^^^^^^^^^^
+
+To activate this mode, either call ``find_package(Python COMPONENTS Interpreter Development)``
+before finding this package, or set the ``PYBIND11_FINDPYTHON`` variable to ON. In this mode,
+you can either use the basic targets, or use the FindPython tools:
+
+.. code-block:: cmake
+
+ find_package(Python COMPONENTS Interpreter Development)
+ find_package(pybind11 CONFIG)
+
+ # pybind11 method:
+ pybind11_add_module(MyModule1 src1.cpp)
+
+ # Python method:
+ Python_add_library(MyModule2 src2.cpp)
+ target_link_libraries(MyModule2 pybind11::headers)
+ set_target_properties(MyModule2 PROPERTIES
+ INTERPROCEDURAL_OPTIMIZATION ON
+    CXX_VISIBILITY_PRESET hidden
+ VISIBILITY_INLINES_HIDDEN ON)
+
+If you build targets yourself, you may be interested in stripping the output
+for reduced size; this is the one other feature that the helper function gives you.
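+
+For example, a hand-built target could be stripped like this (a sketch; the
+target name is illustrative):
+
+.. code-block:: cmake
+
+    Python_add_library(MyModule3 src3.cpp)
+    target_link_libraries(MyModule3 pybind11::headers)
+    pybind11_strip(MyModule3)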
+
+Classic mode
+^^^^^^^^^^^^
+
+Set PythonLibsNew variables to influence python detection and
+CMAKE_CXX_STANDARD to influence standard setting.
+
+.. code-block:: cmake
+
+ find_package(pybind11 CONFIG REQUIRED)
+
+ # Create an extension module
+ add_library(mylib MODULE main.cpp)
+ target_link_libraries(mylib PUBLIC pybind11::module)
+
+ # Or embed the Python interpreter into an executable
+ add_executable(myexe main.cpp)
+ target_link_libraries(myexe PUBLIC pybind11::embed)
+
+
+Hints
+=====
+
+The following variables can be set to guide the search for this package:
+
+``pybind11_DIR``
+ CMake variable, set to directory containing this Config file.
+``CMAKE_PREFIX_PATH``
+ CMake variable, set to root directory of this package.
+``PATH``
+ Environment variable, set to bin directory of this package.
+``CMAKE_DISABLE_FIND_PACKAGE_pybind11``
+ CMake variable, disables ``find_package(pybind11)`` when not ``REQUIRED``,
+ perhaps to force internal build.
+
+Commands
+========
+
+pybind11_add_module
+^^^^^^^^^^^^^^^^^^^
+
+This module defines the following commands to assist with creating Python modules:
+
+.. code-block:: cmake
+
+  pybind11_add_module(<target>
+    [STATIC|SHARED|MODULE]
+    [THIN_LTO] [OPT_SIZE] [NO_EXTRAS] [WITHOUT_SOABI]
+    <files>...)
+
+Add a module and set up all helpers. You can select the type of the library; the
+default is ``MODULE``. There are several options:
+
+``OPT_SIZE``
+ Optimize for size, even if the ``CMAKE_BUILD_TYPE`` is not ``MinSizeRel``.
+``THIN_LTO``
+  Use thin LTO instead of regular LTO if there's a choice (pybind11's selection
+  is disabled if ``CMAKE_INTERPROCEDURAL_OPTIMIZATION`` is set).
+``WITHOUT_SOABI``
+ Disable the SOABI component (``PYBIND11_NEWPYTHON`` mode only).
+``NO_EXTRAS``
+ Disable all extras, exit immediately after making the module.
+
+pybind11_strip
+^^^^^^^^^^^^^^
+
+.. code-block:: cmake
+
+  pybind11_strip(<target>)
+
+Strip a target after building it (linux/macOS), called by ``pybind11_add_module``.
+
+pybind11_extension
+^^^^^^^^^^^^^^^^^^
+
+.. code-block:: cmake
+
+  pybind11_extension(<target>)
+
+Sets the Python extension name correctly for Python on your platform, called by
+``pybind11_add_module``.
+
+pybind11_find_import(module)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. code-block:: cmake
+
+  pybind11_find_import(<module> [VERSION <number>] [REQUIRED] [QUIET])
+
+See if a module is installed. Use the registered name (the one on PyPI). You
+can specify a ``VERSION``, and you can specify ``REQUIRED`` or ``QUIET``. Only available if
+``NOPYTHON`` mode is not active. Sets ``module_VERSION`` and ``module_FOUND``. Caches the
+result once a valid install is found.
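+
+For example (a sketch; the module and version shown are placeholders):
+
+.. code-block:: cmake
+
+    pybind11_find_import(numpy VERSION 1.21 REQUIRED)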
+
+Suggested usage
+===============
+
+Using ``find_package`` with version info is not recommended except for release versions.
+
+.. code-block:: cmake
+
+ find_package(pybind11 CONFIG)
+ find_package(pybind11 2.9 EXACT CONFIG REQUIRED)
+
+#]=============================================================================]
+@PACKAGE_INIT@
+
+# Location of pybind11/pybind11.h
+# This will be relative unless explicitly set as absolute
+set(pybind11_INCLUDE_DIR "@pybind11_INCLUDEDIR@")
+
+set(pybind11_LIBRARY "")
+set(pybind11_DEFINITIONS USING_pybind11)
+set(pybind11_VERSION_TYPE "@pybind11_VERSION_TYPE@")
+
+check_required_components(pybind11)
+
+if(TARGET pybind11::python_link_helper)
+  # This has already been set up elsewhere, such as with a previous call or
+ # add_subdirectory
+ return()
+endif()
+
+include("${CMAKE_CURRENT_LIST_DIR}/pybind11Targets.cmake")
+
+# Easier to use / remember
+add_library(pybind11::headers IMPORTED INTERFACE)
+set_target_properties(pybind11::headers PROPERTIES INTERFACE_LINK_LIBRARIES
+ pybind11::pybind11_headers)
+
+include("${CMAKE_CURRENT_LIST_DIR}/pybind11Common.cmake")
+
+if(NOT pybind11_FIND_QUIETLY)
+ message(
+ STATUS
+ "Found pybind11: ${pybind11_INCLUDE_DIR} (found version \"${pybind11_VERSION}${pybind11_VERSION_TYPE}\")"
+ )
+endif()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11NewTools.cmake b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11NewTools.cmake
new file mode 100644
index 0000000..7d7424a
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11NewTools.cmake
@@ -0,0 +1,256 @@
+# tools/pybind11NewTools.cmake -- Build system for the pybind11 modules
+#
+# Copyright (c) 2020 Wenzel Jakob and Henry Schreiner
+#
+# All rights reserved. Use of this source code is governed by a
+# BSD-style license that can be found in the LICENSE file.
+
+if(CMAKE_VERSION VERSION_LESS 3.12)
+ message(FATAL_ERROR "You cannot use the new FindPython module with CMake < 3.12")
+endif()
+
+include_guard(DIRECTORY)
+
+get_property(
+ is_config
+ TARGET pybind11::headers
+ PROPERTY IMPORTED)
+
+if(pybind11_FIND_QUIETLY)
+ set(_pybind11_quiet QUIET)
+else()
+ set(_pybind11_quiet "")
+endif()
+
+if(NOT Python_FOUND AND NOT Python3_FOUND)
+ if(NOT DEFINED Python_FIND_IMPLEMENTATIONS)
+ set(Python_FIND_IMPLEMENTATIONS CPython PyPy)
+ endif()
+
+  # GitHub Actions-like activation
+ if(NOT DEFINED Python_ROOT_DIR AND DEFINED ENV{pythonLocation})
+ set(Python_ROOT_DIR "$ENV{pythonLocation}")
+ endif()
+
+ find_package(Python 3.6 REQUIRED COMPONENTS Interpreter Development ${_pybind11_quiet})
+
+ # If we are in submodule mode, export the Python targets to global targets.
+  # If this behavior is not desired, run FindPython _before_ finding pybind11.
+ if(NOT is_config)
+ set_property(TARGET Python::Python PROPERTY IMPORTED_GLOBAL TRUE)
+ set_property(TARGET Python::Interpreter PROPERTY IMPORTED_GLOBAL TRUE)
+ if(TARGET Python::Module)
+ set_property(TARGET Python::Module PROPERTY IMPORTED_GLOBAL TRUE)
+ endif()
+ endif()
+endif()
+
+if(Python_FOUND)
+ set(_Python
+ Python
+ CACHE INTERNAL "" FORCE)
+elseif(Python3_FOUND)
+ set(_Python
+ Python3
+ CACHE INTERNAL "" FORCE)
+endif()
+
+if(PYBIND11_MASTER_PROJECT)
+ if(${_Python}_INTERPRETER_ID MATCHES "PyPy")
+ message(STATUS "PyPy ${${_Python}_PyPy_VERSION} (Py ${${_Python}_VERSION})")
+ else()
+ message(STATUS "${_Python} ${${_Python}_VERSION}")
+ endif()
+endif()
+
+# If a user finds Python, they may forget to include the Interpreter component
+# and the following two steps require it. It is highly recommended by CMake
+# when finding development libraries anyway, so we will require it.
+if(NOT DEFINED ${_Python}_EXECUTABLE)
+ message(
+ FATAL_ERROR
+ "${_Python} was found without the Interpreter component. Pybind11 requires this component.")
+
+endif()
+
+if(NOT ${_Python}_EXECUTABLE STREQUAL PYBIND11_PYTHON_EXECUTABLE_LAST)
+ # Detect changes to the Python version/binary in subsequent CMake runs, and refresh config if needed
+ unset(PYTHON_IS_DEBUG CACHE)
+ unset(PYTHON_MODULE_EXTENSION CACHE)
+ set(PYBIND11_PYTHON_EXECUTABLE_LAST
+ "${${_Python}_EXECUTABLE}"
+ CACHE INTERNAL "Python executable during the last CMake run")
+endif()
+
+if(NOT DEFINED PYTHON_IS_DEBUG)
+ # Debug check - see https://stackoverflow.com/questions/646518/python-how-to-detect-debug-Interpreter
+ execute_process(
+ COMMAND "${${_Python}_EXECUTABLE}" "-c"
+ "import sys; sys.exit(hasattr(sys, 'gettotalrefcount'))"
+ RESULT_VARIABLE _PYTHON_IS_DEBUG)
+ set(PYTHON_IS_DEBUG
+ "${_PYTHON_IS_DEBUG}"
+ CACHE INTERNAL "Python debug status")
+endif()
+
+# Get the suffix - SO is deprecated, should use EXT_SUFFIX, but this is
+# required for PyPy3 (as of 7.3.1)
+if(NOT DEFINED PYTHON_MODULE_EXTENSION)
+ execute_process(
+ COMMAND
+ "${${_Python}_EXECUTABLE}" "-c"
+ "import sys, importlib; s = importlib.import_module('distutils.sysconfig' if sys.version_info < (3, 10) else 'sysconfig'); print(s.get_config_var('EXT_SUFFIX') or s.get_config_var('SO'))"
+ OUTPUT_VARIABLE _PYTHON_MODULE_EXTENSION
+ ERROR_VARIABLE _PYTHON_MODULE_EXTENSION_ERR
+ OUTPUT_STRIP_TRAILING_WHITESPACE)
+
+ if(_PYTHON_MODULE_EXTENSION STREQUAL "")
+ message(
+ FATAL_ERROR "pybind11 could not query the module file extension, likely the 'distutils'"
+ "package is not installed. Full error message:\n${_PYTHON_MODULE_EXTENSION_ERR}")
+ endif()
+
+ # This needs to be available for the pybind11_extension function
+ set(PYTHON_MODULE_EXTENSION
+ "${_PYTHON_MODULE_EXTENSION}"
+ CACHE INTERNAL "")
+endif()
+
+# Python debug libraries expose slightly different objects before 3.8
+# https://docs.python.org/3.6/c-api/intro.html#debugging-builds
+# https://stackoverflow.com/questions/39161202/how-to-work-around-missing-pymodule-create2-in-amd64-win-python35-d-lib
+if(PYTHON_IS_DEBUG)
+ set_property(
+ TARGET pybind11::pybind11
+ APPEND
+ PROPERTY INTERFACE_COMPILE_DEFINITIONS Py_DEBUG)
+endif()
+
+# Check on every access - since Python can change - do nothing in that case.
+
+if(DEFINED ${_Python}_INCLUDE_DIRS)
+ # Only add Python for build - must be added during the import for config
+ # since it has to be re-discovered.
+ #
+ # This needs to be a target to be included after the local pybind11
+  # directory, just in case there is an installed pybind11 sitting
+ # next to Python's includes. It also ensures Python is a SYSTEM library.
+ add_library(pybind11::python_headers INTERFACE IMPORTED)
+ set_property(
+ TARGET pybind11::python_headers PROPERTY INTERFACE_INCLUDE_DIRECTORIES
+ "$")
+ set_property(
+ TARGET pybind11::pybind11
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES pybind11::python_headers)
+ set(pybind11_INCLUDE_DIRS
+ "${pybind11_INCLUDE_DIR}" "${${_Python}_INCLUDE_DIRS}"
+ CACHE INTERNAL "Directories where pybind11 and possibly Python headers are located")
+endif()
+
+# In CMake 3.18+, you can find these separately, so include an if
+if(TARGET ${_Python}::Python)
+ set_property(
+ TARGET pybind11::embed
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES ${_Python}::Python)
+endif()
+
+# CMake 3.15+ has this
+if(TARGET ${_Python}::Module)
+ set_property(
+ TARGET pybind11::module
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES ${_Python}::Module)
+else()
+ set_property(
+ TARGET pybind11::module
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES pybind11::python_link_helper)
+endif()
+
+# WITHOUT_SOABI and WITH_SOABI will disable the custom extension handling used by pybind11.
+# WITH_SOABI is passed on to python_add_library.
+function(pybind11_add_module target_name)
+ cmake_parse_arguments(PARSE_ARGV 1 ARG
+ "STATIC;SHARED;MODULE;THIN_LTO;OPT_SIZE;NO_EXTRAS;WITHOUT_SOABI" "" "")
+
+ if(ARG_STATIC)
+ set(lib_type STATIC)
+ elseif(ARG_SHARED)
+ set(lib_type SHARED)
+ else()
+ set(lib_type MODULE)
+ endif()
+
+ if("${_Python}" STREQUAL "Python")
+ python_add_library(${target_name} ${lib_type} ${ARG_UNPARSED_ARGUMENTS})
+ elseif("${_Python}" STREQUAL "Python3")
+ python3_add_library(${target_name} ${lib_type} ${ARG_UNPARSED_ARGUMENTS})
+ else()
+ message(FATAL_ERROR "Cannot detect FindPython version: ${_Python}")
+ endif()
+
+ target_link_libraries(${target_name} PRIVATE pybind11::headers)
+
+ if(lib_type STREQUAL "MODULE")
+ target_link_libraries(${target_name} PRIVATE pybind11::module)
+ else()
+ target_link_libraries(${target_name} PRIVATE pybind11::embed)
+ endif()
+
+ if(MSVC)
+ target_link_libraries(${target_name} PRIVATE pybind11::windows_extras)
+ endif()
+
+ # -fvisibility=hidden is required to allow multiple modules compiled against
+ # different pybind versions to work properly, and for some features (e.g.
+ # py::module_local). We force it on everything inside the `pybind11`
+ # namespace; also turning it on for a pybind module compilation here avoids
+ # potential warnings or issues from having mixed hidden/non-hidden types.
+ if(NOT DEFINED CMAKE_CXX_VISIBILITY_PRESET)
+ set_target_properties(${target_name} PROPERTIES CXX_VISIBILITY_PRESET "hidden")
+ endif()
+
+ if(NOT DEFINED CMAKE_CUDA_VISIBILITY_PRESET)
+ set_target_properties(${target_name} PROPERTIES CUDA_VISIBILITY_PRESET "hidden")
+ endif()
+
+ # If we don't pass a WITH_SOABI or WITHOUT_SOABI, use our own default handling of extensions
+ if(NOT ARG_WITHOUT_SOABI AND NOT "WITH_SOABI" IN_LIST ARG_UNPARSED_ARGUMENTS)
+ pybind11_extension(${target_name})
+ endif()
+
+ if(ARG_NO_EXTRAS)
+ return()
+ endif()
+
+ if(NOT DEFINED CMAKE_INTERPROCEDURAL_OPTIMIZATION)
+ if(ARG_THIN_LTO)
+ target_link_libraries(${target_name} PRIVATE pybind11::thin_lto)
+ else()
+ target_link_libraries(${target_name} PRIVATE pybind11::lto)
+ endif()
+ endif()
+
+  # Use case-insensitive comparison to match the result of $<CONFIG:cfgs>
+ string(TOUPPER "${CMAKE_BUILD_TYPE}" uppercase_CMAKE_BUILD_TYPE)
+ if(NOT MSVC AND NOT "${uppercase_CMAKE_BUILD_TYPE}" MATCHES DEBUG|RELWITHDEBINFO)
+ # Strip unnecessary sections of the binary on Linux/macOS
+ pybind11_strip(${target_name})
+ endif()
+
+ if(MSVC)
+ target_link_libraries(${target_name} PRIVATE pybind11::windows_extras)
+ endif()
+
+ if(ARG_OPT_SIZE)
+ target_link_libraries(${target_name} PRIVATE pybind11::opt_size)
+ endif()
+endfunction()
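+
+# Illustrative use (a sketch; target, option, and source names are
+# hypothetical):
+#   pybind11_add_module(fastops THIN_LTO src/fastops.cpp)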
+
+function(pybind11_extension name)
+ # The extension is precomputed
+ set_target_properties(${name} PROPERTIES PREFIX "" SUFFIX "${PYTHON_MODULE_EXTENSION}")
+
+endfunction()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Tools.cmake b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Tools.cmake
new file mode 100644
index 0000000..66ad00a
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pybind11Tools.cmake
@@ -0,0 +1,233 @@
+# tools/pybind11Tools.cmake -- Build system for the pybind11 modules
+#
+# Copyright (c) 2020 Wenzel Jakob
+#
+# All rights reserved. Use of this source code is governed by a
+# BSD-style license that can be found in the LICENSE file.
+
+# include_guard(global) (pre-CMake 3.10)
+if(TARGET pybind11::python_headers)
+ return()
+endif()
+
+# Built-in in CMake 3.5+
+include(CMakeParseArguments)
+
+if(pybind11_FIND_QUIETLY)
+ set(_pybind11_quiet QUIET)
+else()
+ set(_pybind11_quiet "")
+endif()
+
+# If this is the first run, PYTHON_VERSION can stand in for PYBIND11_PYTHON_VERSION
+if(NOT DEFINED PYBIND11_PYTHON_VERSION AND DEFINED PYTHON_VERSION)
+ message(WARNING "Set PYBIND11_PYTHON_VERSION to search for a specific version, not "
+ "PYTHON_VERSION (which is an output). Assuming that is what you "
+ "meant to do and continuing anyway.")
+ set(PYBIND11_PYTHON_VERSION
+ "${PYTHON_VERSION}"
+ CACHE STRING "Python version to use for compiling modules")
+ unset(PYTHON_VERSION)
+ unset(PYTHON_VERSION CACHE)
+elseif(DEFINED PYBIND11_PYTHON_VERSION)
+ # If this is set as a normal variable, promote it
+ set(PYBIND11_PYTHON_VERSION
+ "${PYBIND11_PYTHON_VERSION}"
+ CACHE STRING "Python version to use for compiling modules")
+else()
+ # Make an empty cache variable.
+ set(PYBIND11_PYTHON_VERSION
+ ""
+ CACHE STRING "Python version to use for compiling modules")
+endif()
+
+# A user can set versions manually too
+set(Python_ADDITIONAL_VERSIONS
+ "3.11;3.10;3.9;3.8;3.7;3.6"
+ CACHE INTERNAL "")
+
+list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}")
+find_package(PythonLibsNew ${PYBIND11_PYTHON_VERSION} MODULE REQUIRED ${_pybind11_quiet})
+list(REMOVE_AT CMAKE_MODULE_PATH -1)
+
+# Makes a normal variable a cached variable
+macro(_PYBIND11_PROMOTE_TO_CACHE NAME)
+ set(_tmp_ptc "${${NAME}}")
+ # CMake 3.21 complains if a cached variable is shadowed by a normal one
+ unset(${NAME})
+ set(${NAME}
+ "${_tmp_ptc}"
+ CACHE INTERNAL "")
+endmacro()
+
+# Cache variables so pybind11_add_module can be used in parent projects
+_pybind11_promote_to_cache(PYTHON_INCLUDE_DIRS)
+_pybind11_promote_to_cache(PYTHON_LIBRARIES)
+_pybind11_promote_to_cache(PYTHON_MODULE_PREFIX)
+_pybind11_promote_to_cache(PYTHON_MODULE_EXTENSION)
+_pybind11_promote_to_cache(PYTHON_VERSION_MAJOR)
+_pybind11_promote_to_cache(PYTHON_VERSION_MINOR)
+_pybind11_promote_to_cache(PYTHON_VERSION)
+_pybind11_promote_to_cache(PYTHON_IS_DEBUG)
+
+if(PYBIND11_MASTER_PROJECT)
+ if(PYTHON_MODULE_EXTENSION MATCHES "pypy")
+ if(NOT DEFINED PYPY_VERSION)
+ execute_process(
+ COMMAND ${PYTHON_EXECUTABLE} -c
+ [=[import sys; sys.stdout.write(".".join(map(str, sys.pypy_version_info[:3])))]=]
+ OUTPUT_VARIABLE pypy_version)
+ set(PYPY_VERSION
+ ${pypy_version}
+ CACHE INTERNAL "")
+ endif()
+ message(STATUS "PYPY ${PYPY_VERSION} (Py ${PYTHON_VERSION})")
+ else()
+ message(STATUS "PYTHON ${PYTHON_VERSION}")
+ endif()
+endif()
+
+# Only add Python for build - must be added during the import for config since
+# it has to be re-discovered.
+#
+# This needs to be a target so it is included after the local pybind11
+# directory; just in case there are multiple versions of pybind11, we want the
+# one we expect.
+add_library(pybind11::python_headers INTERFACE IMPORTED)
+set_property(TARGET pybind11::python_headers PROPERTY INTERFACE_INCLUDE_DIRECTORIES
+                                                      "$<BUILD_INTERFACE:${PYTHON_INCLUDE_DIRS}>")
+set_property(
+ TARGET pybind11::pybind11
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES pybind11::python_headers)
+
+set(pybind11_INCLUDE_DIRS
+ "${pybind11_INCLUDE_DIR}" "${PYTHON_INCLUDE_DIRS}"
+ CACHE INTERNAL "Directories where pybind11 and possibly Python headers are located")
+
+# Python debug libraries expose slightly different objects before 3.8
+# https://docs.python.org/3.6/c-api/intro.html#debugging-builds
+# https://stackoverflow.com/questions/39161202/how-to-work-around-missing-pymodule-create2-in-amd64-win-python35-d-lib
+if(PYTHON_IS_DEBUG)
+ set_property(
+ TARGET pybind11::pybind11
+ APPEND
+ PROPERTY INTERFACE_COMPILE_DEFINITIONS Py_DEBUG)
+endif()
+
+# The <3.11 code here does not support release/debug builds at the same time, like on vcpkg
+if(CMAKE_VERSION VERSION_LESS 3.11)
+ set_property(
+ TARGET pybind11::module
+ APPEND
+ PROPERTY
+ INTERFACE_LINK_LIBRARIES
+ pybind11::python_link_helper
+ "$<$,$>:$>"
+ )
+
+ set_property(
+ TARGET pybind11::embed
+ APPEND
+    PROPERTY INTERFACE_LINK_LIBRARIES pybind11::pybind11 $<BUILD_INTERFACE:${PYTHON_LIBRARIES}>)
+else()
+ # The IMPORTED INTERFACE library here is to ensure that "debug" and "release" get processed outside
+ # of a generator expression - https://gitlab.kitware.com/cmake/cmake/-/issues/18424, as they are
+ # target_link_library keywords rather than real libraries.
+ add_library(pybind11::_ClassicPythonLibraries IMPORTED INTERFACE)
+ target_link_libraries(pybind11::_ClassicPythonLibraries INTERFACE ${PYTHON_LIBRARIES})
+ target_link_libraries(
+ pybind11::module
+ INTERFACE
+ pybind11::python_link_helper
+ "$<$,$>:pybind11::_ClassicPythonLibraries>")
+
+ target_link_libraries(pybind11::embed INTERFACE pybind11::pybind11
+ pybind11::_ClassicPythonLibraries)
+endif()
+
+function(pybind11_extension name)
+ # The prefix and extension are provided by FindPythonLibsNew.cmake
+ set_target_properties(${name} PROPERTIES PREFIX "${PYTHON_MODULE_PREFIX}"
+ SUFFIX "${PYTHON_MODULE_EXTENSION}")
+endfunction()
+
+# Build a Python extension module:
+# pybind11_add_module( [MODULE | SHARED] [EXCLUDE_FROM_ALL]
+# [NO_EXTRAS] [THIN_LTO] [OPT_SIZE] source1 [source2 ...])
+#
+function(pybind11_add_module target_name)
+ set(options "MODULE;SHARED;EXCLUDE_FROM_ALL;NO_EXTRAS;SYSTEM;THIN_LTO;OPT_SIZE")
+ cmake_parse_arguments(ARG "${options}" "" "" ${ARGN})
+
+ if(ARG_MODULE AND ARG_SHARED)
+ message(FATAL_ERROR "Can't be both MODULE and SHARED")
+ elseif(ARG_SHARED)
+ set(lib_type SHARED)
+ else()
+ set(lib_type MODULE)
+ endif()
+
+ if(ARG_EXCLUDE_FROM_ALL)
+ set(exclude_from_all EXCLUDE_FROM_ALL)
+ else()
+ set(exclude_from_all "")
+ endif()
+
+ add_library(${target_name} ${lib_type} ${exclude_from_all} ${ARG_UNPARSED_ARGUMENTS})
+
+ target_link_libraries(${target_name} PRIVATE pybind11::module)
+
+ if(ARG_SYSTEM)
+ message(
+ STATUS
+ "Warning: this does not have an effect - use NO_SYSTEM_FROM_IMPORTED if using imported targets"
+ )
+ endif()
+
+ pybind11_extension(${target_name})
+
+ # -fvisibility=hidden is required to allow multiple modules compiled against
+ # different pybind versions to work properly, and for some features (e.g.
+ # py::module_local). We force it on everything inside the `pybind11`
+ # namespace; also turning it on for a pybind module compilation here avoids
+ # potential warnings or issues from having mixed hidden/non-hidden types.
+ if(NOT DEFINED CMAKE_CXX_VISIBILITY_PRESET)
+ set_target_properties(${target_name} PROPERTIES CXX_VISIBILITY_PRESET "hidden")
+ endif()
+
+ if(NOT DEFINED CMAKE_CUDA_VISIBILITY_PRESET)
+ set_target_properties(${target_name} PROPERTIES CUDA_VISIBILITY_PRESET "hidden")
+ endif()
+
+ if(ARG_NO_EXTRAS)
+ return()
+ endif()
+
+ if(NOT DEFINED CMAKE_INTERPROCEDURAL_OPTIMIZATION)
+ if(ARG_THIN_LTO)
+ target_link_libraries(${target_name} PRIVATE pybind11::thin_lto)
+ else()
+ target_link_libraries(${target_name} PRIVATE pybind11::lto)
+ endif()
+ endif()
+
+  # Use case-insensitive comparison to match the result of $<CONFIG:cfgs>
+ string(TOUPPER "${CMAKE_BUILD_TYPE}" uppercase_CMAKE_BUILD_TYPE)
+ if(NOT MSVC AND NOT "${uppercase_CMAKE_BUILD_TYPE}" MATCHES DEBUG|RELWITHDEBINFO)
+ pybind11_strip(${target_name})
+ endif()
+
+ if(MSVC)
+ target_link_libraries(${target_name} PRIVATE pybind11::windows_extras)
+ endif()
+
+ if(ARG_OPT_SIZE)
+ target_link_libraries(${target_name} PRIVATE pybind11::opt_size)
+ endif()
+endfunction()
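+
+# Illustrative classic-mode use (a sketch; names are hypothetical):
+#   pybind11_add_module(example MODULE NO_EXTRAS src/example.cpp)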
+
+# Provide a general way to call common Python commands in the "common" file.
+set(_Python
+ PYTHON
+ CACHE INTERNAL "" FORCE)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pyproject.toml b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pyproject.toml
new file mode 100644
index 0000000..8fe2f47
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/pyproject.toml
@@ -0,0 +1,3 @@
+[build-system]
+requires = ["setuptools>=42", "wheel"]
+build-backend = "setuptools.build_meta"
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/setup_global.py.in b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/setup_global.py.in
new file mode 100644
index 0000000..885ac5c
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/setup_global.py.in
@@ -0,0 +1,63 @@
+#!/usr/bin/env python3
+
+# Setup script for pybind11-global (in the sdist or in tools/setup_global.py in the repository)
+# This package is targeted for easy use from CMake.
+
+import glob
+import os
+import re
+
+# Setuptools has to be before distutils
+from setuptools import setup
+
+from distutils.command.install_headers import install_headers
+
+class InstallHeadersNested(install_headers):
+ def run(self):
+ headers = self.distribution.headers or []
+ for header in headers:
+ # Remove pybind11/include/
+ short_header = header.split("/", 2)[-1]
+
+ dst = os.path.join(self.install_dir, os.path.dirname(short_header))
+ self.mkpath(dst)
+ (out, _) = self.copy_file(header, dst)
+ self.outfiles.append(out)
+
+
+main_headers = glob.glob("pybind11/include/pybind11/*.h")
+detail_headers = glob.glob("pybind11/include/pybind11/detail/*.h")
+eigen_headers = glob.glob("pybind11/include/pybind11/eigen/*.h")
+stl_headers = glob.glob("pybind11/include/pybind11/stl/*.h")
+cmake_files = glob.glob("pybind11/share/cmake/pybind11/*.cmake")
+pkgconfig_files = glob.glob("pybind11/share/pkgconfig/*.pc")
+headers = main_headers + detail_headers + stl_headers + eigen_headers
+
+cmdclass = {"install_headers": InstallHeadersNested}
+$extra_cmd
+
+# This will _not_ affect installing from wheels,
+# only building wheels or installing from SDist.
+# Primarily intended on Windows, where this is sometimes
+# customized (for example, conda-forge uses Library/)
+base = os.environ.get("PYBIND11_GLOBAL_PREFIX", "")
+
+# Must have a separator
+if base and not base.endswith("/"):
+ base += "/"
+
+setup(
+ name="pybind11_global",
+ version="$version",
+ packages=[],
+ headers=headers,
+ data_files=[
+ (base + "share/cmake/pybind11", cmake_files),
+ (base + "share/pkgconfig", pkgconfig_files),
+ (base + "include/pybind11", main_headers),
+ (base + "include/pybind11/detail", detail_headers),
+ (base + "include/pybind11/eigen", eigen_headers),
+ (base + "include/pybind11/stl", stl_headers),
+ ],
+ cmdclass=cmdclass,
+)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/setup_main.py.in b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/setup_main.py.in
new file mode 100644
index 0000000..6358cc7
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/_vendor/pybind11/tools/setup_main.py.in
@@ -0,0 +1,44 @@
+#!/usr/bin/env python3
+
+# Setup script (in the sdist or in tools/setup_main.py in the repository)
+
+from setuptools import setup
+
+cmdclass = {}
+$extra_cmd
+
+setup(
+ name="pybind11",
+ version="$version",
+ download_url='https://github.com/pybind/pybind11/tarball/v$version',
+ packages=[
+ "pybind11",
+ "pybind11.include.pybind11",
+ "pybind11.include.pybind11.detail",
+ "pybind11.include.pybind11.eigen",
+ "pybind11.include.pybind11.stl",
+ "pybind11.share.cmake.pybind11",
+ "pybind11.share.pkgconfig",
+ ],
+ package_data={
+ "pybind11": ["py.typed"],
+ "pybind11.include.pybind11": ["*.h"],
+ "pybind11.include.pybind11.detail": ["*.h"],
+ "pybind11.include.pybind11.eigen": ["*.h"],
+ "pybind11.include.pybind11.stl": ["*.h"],
+ "pybind11.share.cmake.pybind11": ["*.cmake"],
+ "pybind11.share.pkgconfig": ["*.pc"],
+ },
+ extras_require={
+ "global": ["pybind11_global==$version"]
+ },
+ entry_points={
+ "console_scripts": [
+ "pybind11-config = pybind11.__main__:main",
+ ],
+ "pipx.run": [
+ "pybind11 = pybind11.__main__:main",
+ ]
+ },
+ cmdclass=cmdclass
+)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/aspects.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/aspects.py
new file mode 100644
index 0000000..f52651d
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/aspects.py
@@ -0,0 +1,942 @@
+from builtins import bytearray as builtin_bytearray
+from builtins import bytes as builtin_bytes
+from builtins import str as builtin_str
+import codecs
+from types import BuiltinFunctionType
+from typing import TYPE_CHECKING
+from typing import Any
+
+from .._metrics import _set_iast_error_metric
+from .._taint_tracking import TagMappingMode
+from .._taint_tracking import TaintRange
+from .._taint_tracking import _convert_escaped_text_to_tainted_text
+from .._taint_tracking import _format_aspect
+from .._taint_tracking import are_all_text_all_ranges
+from .._taint_tracking import as_formatted_evidence
+from .._taint_tracking import common_replace
+from .._taint_tracking import copy_and_shift_ranges_from_strings
+from .._taint_tracking import copy_ranges_from_strings
+from .._taint_tracking import get_ranges
+from .._taint_tracking import get_tainted_ranges
+from .._taint_tracking import is_pyobject_tainted
+from .._taint_tracking import new_pyobject_id
+from .._taint_tracking import parse_params
+from .._taint_tracking import set_ranges
+from .._taint_tracking import shift_taint_range
+from .._taint_tracking import taint_pyobject_with_ranges
+from .._taint_tracking._native import aspects # noqa: F401
+
+
+if TYPE_CHECKING:
+ from typing import Callable # noqa:F401
+ from typing import Dict # noqa:F401
+ from typing import List # noqa:F401
+ from typing import Optional # noqa:F401
+ from typing import Sequence # noqa:F401
+ from typing import Tuple # noqa:F401
+ from typing import Union # noqa:F401
+
+ TEXT_TYPE = Union[str, bytes, bytearray]
+
+TEXT_TYPES = (str, bytes, bytearray)
+
+
+_add_aspect = aspects.add_aspect
+_extend_aspect = aspects.extend_aspect
+_index_aspect = aspects.index_aspect
+_join_aspect = aspects.join_aspect
+_slice_aspect = aspects.slice_aspect
+
+__all__ = ["add_aspect", "str_aspect", "bytearray_extend_aspect", "decode_aspect", "encode_aspect"]
+
+
+def add_aspect(op1, op2):
+ if not isinstance(op1, TEXT_TYPES) or not isinstance(op2, TEXT_TYPES) or type(op1) != type(op2):
+ return op1 + op2
+ return _add_aspect(op1, op2)
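+
+
+# Sketch of the guard above: only same-typed text operands go through the
+# native aspect; everything else falls back to plain `+`.
+#   add_aspect("a", "b")  # -> "ab" via _add_aspect; taint ranges propagate
+#   add_aspect(1, 2)      # -> 3, plain addition fallback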
+
+
+def str_aspect(orig_function, flag_added_args, *args, **kwargs):
+ # type: (Optional[Callable], int, Any, Any) -> str
+ if orig_function:
+ if orig_function != builtin_str:
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+ result = builtin_str(*args, **kwargs)
+ else:
+ result = args[0].str(*args[1:], **kwargs)
+
+ if args and isinstance(args[0], TEXT_TYPES) and is_pyobject_tainted(args[0]):
+ try:
+ if isinstance(args[0], (bytes, bytearray)):
+ encoding = parse_params(1, "encoding", "utf-8", *args, **kwargs)
+ errors = parse_params(2, "errors", "strict", *args, **kwargs)
+ check_offset = args[0].decode(encoding, errors)
+ else:
+ check_offset = args[0]
+ offset = result.index(check_offset)
+ copy_and_shift_ranges_from_strings(args[0], result, offset)
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. str_aspect. {}".format(e))
+ return result
+
+
+def bytes_aspect(orig_function, flag_added_args, *args, **kwargs):
+ # type: (Optional[Callable], int, Any, Any) -> bytes
+ if orig_function:
+ if orig_function != builtin_bytes:
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+ result = builtin_bytes(*args, **kwargs)
+ else:
+ result = args[0].bytes(*args[1:], **kwargs)
+
+ if args and isinstance(args[0], TEXT_TYPES) and is_pyobject_tainted(args[0]):
+ try:
+ copy_ranges_from_strings(args[0], result)
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. bytes_aspect. {}".format(e))
+ return result
+
+
+def bytearray_aspect(orig_function, flag_added_args, *args, **kwargs):
+ # type: (Optional[Callable], int, Any, Any) -> bytearray
+ if orig_function:
+ if orig_function != builtin_bytearray:
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+ result = builtin_bytearray(*args, **kwargs)
+ else:
+ result = args[0].bytearray(*args[1:], **kwargs)
+
+ if args and isinstance(args[0], TEXT_TYPES) and is_pyobject_tainted(args[0]):
+ try:
+ copy_ranges_from_strings(args[0], result)
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. bytearray_aspect. {}".format(e))
+ return result
+
+
+def join_aspect(orig_function, flag_added_args, *args, **kwargs):
+ # type: (Optional[Callable], int, Any, Any) -> Any
+ if not orig_function:
+ orig_function = args[0].join
+ if not isinstance(orig_function, BuiltinFunctionType):
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+
+ if not args:
+ return orig_function(*args, **kwargs)
+
+ joiner = args[0]
+ args = args[flag_added_args:]
+ if not isinstance(joiner, TEXT_TYPES):
+ return joiner.join(*args, **kwargs)
+ try:
+ return _join_aspect(joiner, *args, **kwargs)
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. join_aspect. {}".format(e))
+ return joiner.join(*args, **kwargs)
+
+
+def index_aspect(candidate_text, index) -> Any:
+ result = candidate_text[index]
+
+ if not isinstance(candidate_text, TEXT_TYPES) or not isinstance(index, int):
+ return result
+
+ try:
+ return _index_aspect(candidate_text, index)
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. index_aspect. {}".format(e))
+ return result
+
+
+def slice_aspect(candidate_text, start, stop, step) -> Any:
+ if (
+ not isinstance(candidate_text, TEXT_TYPES)
+ or (start is not None and not isinstance(start, int))
+ or (stop is not None and not isinstance(stop, int))
+ or (step is not None and not isinstance(step, int))
+ ):
+ return candidate_text[start:stop:step]
+ result = candidate_text[start:stop:step]
+ try:
+ new_result = _slice_aspect(candidate_text, start, stop, step)
+ if new_result != result:
+ raise Exception("Propagation result %r is different to candidate_text[slice] %r" % (new_result, result))
+ return new_result
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. slice_aspect. {}".format(e))
+ return result
+
+
+def bytearray_extend_aspect(orig_function, flag_added_args, *args, **kwargs):
+ # type: (Optional[Callable], int, Any, Any) -> Any
+ if orig_function and not isinstance(orig_function, BuiltinFunctionType):
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+
+ if len(args) < 2:
+        # If we're not receiving at least 2 arguments, it means the call was
+        # ``x.extend()`` and not ``x.extend(y)``, so either it's not the extend
+        # we're looking for, or there are no changes to the taint ranges.
+ return args[0].extend(*args[1:], **kwargs)
+
+ op1 = args[0]
+ op2 = args[1]
+ if not isinstance(op1, bytearray) or not isinstance(op2, (bytearray, bytes)):
+ return op1.extend(*args[1:], **kwargs)
+ try:
+ return _extend_aspect(op1, op2)
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. extend_aspect. {}".format(e))
+ return op1.extend(op2)
+
+
+def modulo_aspect(candidate_text, candidate_tuple):
+ # type: (Any, Any) -> Any
+ if not isinstance(candidate_text, TEXT_TYPES):
+ return candidate_text % candidate_tuple
+
+ try:
+ if isinstance(candidate_tuple, tuple):
+ parameter_list = candidate_tuple
+ else:
+ parameter_list = (candidate_tuple,)
+
+ ranges_orig, candidate_text_ranges = are_all_text_all_ranges(candidate_text, parameter_list)
+ if not ranges_orig:
+ return candidate_text % candidate_tuple
+
+ return _convert_escaped_text_to_tainted_text(
+ as_formatted_evidence(
+ candidate_text,
+ candidate_text_ranges,
+ tag_mapping_function=TagMappingMode.Mapper,
+ )
+ % tuple(
+ as_formatted_evidence(
+ parameter,
+ tag_mapping_function=TagMappingMode.Mapper,
+ )
+ if isinstance(parameter, TEXT_TYPES)
+ else parameter
+ for parameter in parameter_list
+ ),
+ ranges_orig=ranges_orig,
+ )
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. modulo_aspect. {}".format(e))
+ return candidate_text % candidate_tuple
+
+
+def build_string_aspect(*args): # type: (List[Any]) -> str
+ return join_aspect("".join, 1, "", args)
+
+
+def ljust_aspect(orig_function, flag_added_args, *args, **kwargs):
+ # type: (Optional[Callable], int, Any, Any) -> Union[str, bytes, bytearray]
+ if not orig_function:
+ orig_function = args[0].ljust
+ if not isinstance(orig_function, BuiltinFunctionType):
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+
+ candidate_text = args[0]
+ args = args[flag_added_args:]
+
+ result = candidate_text.ljust(*args, **kwargs)
+
+ if not isinstance(candidate_text, TEXT_TYPES):
+ return result
+
+ try:
+ ranges_new = get_ranges(candidate_text)
+ fillchar = parse_params(1, "fillchar", " ", *args, **kwargs)
+ fillchar_ranges = get_ranges(fillchar)
+ if ranges_new is None or (not ranges_new and not fillchar_ranges):
+ return result
+
+ if fillchar_ranges:
+ # Can only be one char, so we create one range to cover from the start to the end
+ ranges_new = ranges_new + [shift_taint_range(fillchar_ranges[0], len(candidate_text))]
+
+ new_result = candidate_text.ljust(parse_params(0, "width", None, *args, **kwargs), fillchar)
+ taint_pyobject_with_ranges(new_result, ranges_new)
+ return new_result
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. ljust_aspect. {}".format(e))
+
+ return result
+
+
+def zfill_aspect(orig_function, flag_added_args, *args, **kwargs):
+ # type: (Optional[Callable], int, Any, Any) -> Any
+ if orig_function and not isinstance(orig_function, BuiltinFunctionType):
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+
+ candidate_text = args[0]
+ args = args[flag_added_args:]
+
+ result = candidate_text.zfill(*args, **kwargs)
+
+ if not isinstance(candidate_text, TEXT_TYPES):
+ return result
+
+ try:
+ ranges_orig = get_ranges(candidate_text)
+ if not ranges_orig:
+ return result
+ prefix = candidate_text[0] in ("-", "+")
+
+ difflen = len(result) - len(candidate_text)
+ ranges_new = [] # type: List[TaintRange]
+ ranges_new_append = ranges_new.append
+ ranges_new_extend = ranges_new.extend
+
+ for r in ranges_orig:
+ if not prefix or r.start > 0:
+ ranges_new_append(TaintRange(start=r.start + difflen, length=r.length, source=r.source))
+ else:
+ ranges_new_extend(
+ [
+ TaintRange(start=0, length=1, source=r.source),
+ TaintRange(start=r.start + difflen + 1, length=r.length - 1, source=r.source),
+ ]
+ )
+ taint_pyobject_with_ranges(result, tuple(ranges_new))
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. format_aspect. {}".format(e))
+
+ return result
+
+
+def format_aspect(
+ orig_function, # type: Optional[Callable]
+ flag_added_args, # type: int
+ *args, # type: Any
+ **kwargs, # type: Dict[str, Any]
+): # type: (...) -> str
+ if not orig_function:
+ orig_function = args[0].format
+
+ if not isinstance(orig_function, BuiltinFunctionType):
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+
+ if not args:
+ return orig_function(*args, **kwargs)
+
+ candidate_text = args[0] # type: str
+ args = args[flag_added_args:]
+
+ result = candidate_text.format(*args, **kwargs)
+
+ if not isinstance(candidate_text, TEXT_TYPES):
+ return result
+
+ try:
+ params = tuple(args) + tuple(kwargs.values())
+ new_result = _format_aspect(candidate_text, params, *args, **kwargs)
+ if new_result != result:
+ raise Exception("Propagation result %r is different to candidate_text.format %r" % (new_result, result))
+ return new_result
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. format_aspect. {}".format(e))
+
+ return result
+
+
+def format_map_aspect(
+ orig_function, flag_added_args, *args, **kwargs
+): # type: (Optional[Callable], int, Any, Any) -> str
+ if orig_function and not isinstance(orig_function, BuiltinFunctionType):
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+
+ if orig_function and not args:
+ return orig_function(*args, **kwargs)
+
+ candidate_text = args[0] # type: str
+ args = args[flag_added_args:]
+ if not isinstance(candidate_text, TEXT_TYPES):
+ return candidate_text.format_map(*args, **kwargs)
+
+ try:
+ mapping = parse_params(0, "mapping", None, *args, **kwargs)
+ mapping_tuple = tuple(mapping if not isinstance(mapping, dict) else mapping.values())
+ ranges_orig, candidate_text_ranges = are_all_text_all_ranges(
+ candidate_text,
+ args + mapping_tuple,
+ )
+ if not ranges_orig:
+ return candidate_text.format_map(*args, **kwargs)
+
+ return _convert_escaped_text_to_tainted_text(
+ as_formatted_evidence(
+ candidate_text, candidate_text_ranges, tag_mapping_function=TagMappingMode.Mapper
+ ).format_map(
+ {
+ key: as_formatted_evidence(value, tag_mapping_function=TagMappingMode.Mapper)
+ if isinstance(value, TEXT_TYPES)
+ else value
+ for key, value in mapping.items()
+ }
+ ),
+ ranges_orig=ranges_orig,
+ )
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. format_map_aspect. {}".format(e))
+ return candidate_text.format_map(*args, **kwargs)
+
+
+def repr_aspect(orig_function, flag_added_args, *args, **kwargs):
+ # type: (Optional[Callable], Any, Any, Any) -> Any
+
+ # DEV: We call this function directly passing None as orig_function
+ if orig_function is not None and not (
+ orig_function is repr or getattr(orig_function, "__name__", None) == "__repr__"
+ ):
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+
+ result = repr(*args, **kwargs)
+
+ if args and isinstance(args[0], TEXT_TYPES) and is_pyobject_tainted(args[0]):
+ try:
+            if isinstance(args[0], bytes):
+                check_offset = ascii(args[0])[2:-1]  # strip the leading b' and trailing '
+            elif isinstance(args[0], bytearray):
+                check_offset = ascii(args[0])[12:-2]  # strip the leading bytearray(b' and trailing ')
+ else:
+ check_offset = args[0]
+ try:
+ offset = result.index(check_offset)
+ except ValueError:
+ offset = 0
+
+ copy_and_shift_ranges_from_strings(args[0], result, offset, len(check_offset))
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. repr_aspect. {}".format(e))
+ return result
+
+
+def format_value_aspect(
+ element, # type: Any
+ options=0, # type: int
+ format_spec=None, # type: Optional[str]
+): # type: (...) -> str
+    if options == 115:  # ord("s"): !s / str conversion
+        new_text = str_aspect(str, 0, element)
+    elif options == 114:  # ord("r"): !r / repr conversion
+        # TODO: use our repr once we have implemented it
+        new_text = repr_aspect(repr, 0, element)
+    elif options == 97:  # ord("a"): !a / ascii conversion
+        new_text = ascii(element)
+ else:
+ new_text = element
+ if not isinstance(new_text, TEXT_TYPES):
+ return format(new_text)
+
+ try:
+ if format_spec:
+ # Apply formatting
+ text_ranges = get_tainted_ranges(new_text)
+ if text_ranges:
+ new_new_text = ("{:%s}" % format_spec).format(new_text)
+ try:
+ new_ranges = list()
+ for text_range in text_ranges:
+ new_ranges.append(shift_taint_range(text_range, new_new_text.index(new_text)))
+ if new_ranges:
+ taint_pyobject_with_ranges(new_new_text, tuple(new_ranges))
+ return new_new_text
+ except ValueError:
+ return ("{:%s}" % format_spec).format(new_text)
+ else:
+ return ("{:%s}" % format_spec).format(new_text)
+ else:
+ return str_aspect(str, 0, new_text)
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. format_value_aspect. {}".format(e))
+ return new_text
+
+
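+# Helper for decode_aspect/encode_aspect below: runs the incremental codec one
+# element at a time so that taint ranges, tracked as offsets into `self`, can
+# be re-mapped onto the converted result.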
+def incremental_translation(self, incr_coder, funcode, empty):
+ tainted_ranges = iter(get_tainted_ranges(self))
+ result_list, new_ranges = [], []
+ result_length, i = 0, 0
+ tainted_range = next(tainted_ranges, None)
+ tainted_new_length = 0
+ in_tainted = False
+ tainted_start = 0
+ bytes_iterated = 0
+ try:
+ for i in range(len(self)):
+ if tainted_range is None:
+ # no more tainted ranges, finish decoding all at once
+ new_prod = funcode(self[i:])
+ result_list.append(new_prod)
+ break
+ if i == tainted_range.start:
+ # start new tainted range
+ tainted_start = bytes_iterated
+ tainted_new_length = 0
+ in_tainted = True
+
+ new_prod = funcode(self[i : i + 1])
+ result_list.append(new_prod)
+ result_length += len(new_prod)
+
+ if in_tainted:
+ tainted_new_length += len(new_prod)
+ else:
+ bytes_iterated += len(new_prod)
+
+ if i + 1 == tainted_range.start + tainted_range.length and tainted_new_length > 0:
+                # End of the range. Do not taint the partial multi-byte character that comes next.
+ new_ranges.append(
+ TaintRange(
+ start=tainted_start,
+ length=tainted_new_length,
+ source=tainted_range.source,
+ )
+ )
+
+ tainted_range = next(tainted_ranges, None)
+ result_list.append(funcode(self[:0], True))
+ except UnicodeDecodeError as e:
+ offset = -len(incr_coder.getstate()[0])
+ raise UnicodeDecodeError(e.args[0], self, i + e.args[2] + offset, i + e.args[3] + offset, *e.args[4:])
+ except UnicodeEncodeError:
+ funcode(self)
+ result = empty.join(result_list)
+ taint_pyobject_with_ranges(result, new_ranges)
+ return result
+
+
+def decode_aspect(orig_function, flag_added_args, *args, **kwargs):
+ if orig_function and (not flag_added_args or not args):
+        # This patch is unexpected, so we fall back
+ # to executing the original function
+ return orig_function(*args, **kwargs)
+
+ self = args[0]
+ args = args[(flag_added_args or 1) :]
+ # Assume we call decode method of the first argument
+ result = self.decode(*args, **kwargs)
+
+ if not is_pyobject_tainted(self) or not isinstance(self, bytes):
+ return result
+
+ try:
+ codec = args[0] if args else "utf-8"
+ inc_dec = codecs.getincrementaldecoder(codec)(**kwargs)
+ return incremental_translation(self, inc_dec, inc_dec.decode, "")
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. decode_aspect. {}".format(e))
+ return result
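+
+
+# Sketch of the above (assuming `data` is a tainted bytes object):
+#   text = decode_aspect(None, 0, data, "utf-8")  # taint ranges survive decoding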
+
+
+def encode_aspect(orig_function, flag_added_args, *args, **kwargs):
+ if orig_function and (not flag_added_args or not args):
+        # This patch is unexpected, so we fall back
+ # to executing the original function
+ return orig_function(*args, **kwargs)
+
+ self = args[0]
+ args = args[(flag_added_args or 1) :]
+ # Assume we call encode method of the first argument
+ result = self.encode(*args, **kwargs)
+
+ if not is_pyobject_tainted(self) or not isinstance(self, str):
+ return result
+
+ try:
+ codec = args[0] if args else "utf-8"
+ inc_enc = codecs.getincrementalencoder(codec)(**kwargs)
+ return incremental_translation(self, inc_enc, inc_enc.encode, b"")
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. encode_aspect. {}".format(e))
+ return result
+
+
+def upper_aspect(
+ orig_function, flag_added_args, *args, **kwargs
+): # type: (Optional[Callable], int, Any, Any) -> TEXT_TYPE
+ if orig_function and (not isinstance(orig_function, BuiltinFunctionType) or not args):
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+
+ candidate_text = args[0]
+ args = args[flag_added_args:]
+ if not isinstance(candidate_text, TEXT_TYPES):
+ return candidate_text.upper(*args, **kwargs)
+
+ try:
+ return common_replace("upper", candidate_text, *args, **kwargs)
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. upper_aspect. {}".format(e))
+ return candidate_text.upper(*args, **kwargs)
+
+
+def lower_aspect(
+ orig_function, flag_added_args, *args, **kwargs
+): # type: (Optional[Callable], int, Any, Any) -> TEXT_TYPE
+ if orig_function and (not isinstance(orig_function, BuiltinFunctionType) or not args):
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+
+ candidate_text = args[0]
+ args = args[flag_added_args:]
+ if not isinstance(candidate_text, TEXT_TYPES):
+ return candidate_text.lower(*args, **kwargs)
+
+ try:
+ return common_replace("lower", candidate_text, *args, **kwargs)
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. lower_aspect. {}".format(e))
+ return candidate_text.lower(*args, **kwargs)
+
+
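+# Distributes the taint ranges of the original string over the elements
+# produced by splitting it, escaping each element as formatted evidence so the
+# ranges can be reassembled after the join (used by aspect_replace_api below).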
+def _distribute_ranges_and_escape(
+ split_elements, # type: List[Optional[TEXT_TYPE]]
+ len_separator, # type: int
+ ranges, # type: Tuple[TaintRange, ...]
+): # type: (...) -> List[Optional[TEXT_TYPE]]
+ # FIXME: converts to set, and then to list again, probably to remove
+ # duplicates. This should be removed once the ranges values on the
+ # taint dictionary are stored in a set.
+ range_set = set(ranges)
+ range_set_remove = range_set.remove
+ formatted_elements = [] # type: List[Optional[TEXT_TYPE]]
+ formatted_elements_append = formatted_elements.append
+ element_start = 0
+ extra = 0
+
+ for element in split_elements:
+ if element is None:
+ extra += len_separator
+ continue
+        # DEV: If this branch is taken, the element is an int, i.e. part of a bytes/bytearray
+ if isinstance(element, int):
+ len_element = 1
+ else:
+ len_element = len(element)
+ element_end = element_start + len_element
+ new_ranges = {} # type: Dict[TaintRange, TaintRange]
+
+ for taint_range in ranges:
+ if (taint_range.start + taint_range.length) <= (element_start + extra):
+ try:
+ range_set_remove(taint_range)
+ except KeyError:
+ # If the range appears twice in ranges, it will be
+ # iterated twice, but it's only once in range_set,
+ # raising KeyError at remove, so it can be safely ignored
+ pass
+ continue
+
+ if taint_range.start > element_end:
+ continue
+
+ start = max(taint_range.start, element_start)
+ end = min((taint_range.start + taint_range.length), element_end)
+ if end <= start:
+ continue
+
+ if end - element_start < 1:
+ continue
+
+ new_range = TaintRange(
+ start=start - element_start,
+ length=end - element_start,
+ source=taint_range.source,
+ )
+ new_ranges[new_range] = taint_range
+
+ element_ranges = tuple(new_ranges.keys())
+        # DEV: If this branch is taken, the element is an int, i.e. part of a bytes/bytearray
+ if isinstance(element, int):
+ element_new_id = new_pyobject_id(bytes([element]))
+ else:
+ element_new_id = new_pyobject_id(element)
+ set_ranges(element_new_id, element_ranges)
+
+ formatted_elements_append(
+ as_formatted_evidence(
+ element_new_id,
+ element_ranges,
+ TagMappingMode.Mapper_Replace,
+ new_ranges,
+ )
+ )
+
+ element_start = element_end + len_separator
+ return formatted_elements
+
+
+def aspect_replace_api(
+ candidate_text, old_value, new_value, count, orig_result
+): # type: (Any, Any, Any, int, Any) -> str
+ ranges_orig, candidate_text_ranges = are_all_text_all_ranges(candidate_text, (old_value, new_value))
+ if not ranges_orig: # Ranges in args/kwargs are checked
+ return orig_result
+
+ empty = b"" if isinstance(candidate_text, (bytes, bytearray)) else "" # type: TEXT_TYPE
+
+ if old_value:
+ elements = candidate_text.split(old_value, count) # type: Sequence[TEXT_TYPE]
+ else:
+ if count == -1:
+ elements = (
+ [
+ empty,
+ ]
+ + (
+ list(candidate_text) if isinstance(candidate_text, str) else [bytes([x]) for x in candidate_text] # type: ignore
+ )
+ + [
+ empty,
+ ]
+ )
+ else:
+ if isinstance(candidate_text, str):
+ elements = (
+ [
+ empty,
+ ]
+ + list(candidate_text[: count - 1])
+ + [candidate_text[count - 1 :]]
+ )
+ if len(elements) == count and elements[-1] != "":
+ elements.append(empty)
+ else:
+ elements = (
+ [
+ empty,
+ ]
+ + [bytes([x]) for x in candidate_text[: count - 1]]
+ + [bytes([x for x in candidate_text[count - 1 :]])]
+ )
+ if len(elements) == count and elements[-1] != b"":
+ elements.append(empty)
+ i = 0
+ new_elements = [] # type: List[Optional[TEXT_TYPE]]
+ new_elements_append = new_elements.append
+
+    # If the new value is empty, _distribute_ranges_and_escape cannot tell
+    # what the replacement was, and so cannot shift the ranges. Also, when
+    # the occurrence of old_value is at the first or last position, split()
+    # adds an empty string, e.g.:
+    # 'XabcX'.split('X') -> ['', 'abc', '']
+    # We insert None at each position where an old_value was removed, so that
+    # _distribute_ranges_and_escape knows to shift the ranges at that point
+    # by len(old_value).
+ if new_value in ("", b""):
+ len_elements = len(elements)
+ for element in elements:
+ if i == 0 and element in ("", b""):
+ new_elements_append(None)
+ i += 1
+ continue
+ if i + 1 == len_elements and element in ("", b""):
+ new_elements_append(None)
+ continue
+
+ new_elements_append(element)
+
+ if count < 0 and i + 1 < len(elements):
+ new_elements_append(None)
+ elif i >= count and i + 1 < len(elements):
+ new_elements_append(old_value)
+ i += 1
+ else:
+ new_elements = elements # type: ignore
+
+ if candidate_text_ranges:
+ new_elements = _distribute_ranges_and_escape(
+ new_elements,
+ len(old_value),
+ candidate_text_ranges,
+ )
+
+ result_formatted = as_formatted_evidence(new_value, tag_mapping_function=TagMappingMode.Mapper).join(new_elements)
+
+ result = _convert_escaped_text_to_tainted_text(
+ result_formatted,
+ ranges_orig=ranges_orig,
+ )
+
+ return result
+
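+# Trace of the None-marker handling above (hypothetical input): for
+# 'XabcX'.replace('X', ''), split() produces ['', 'abc', ''], which the loop
+# turns into [None, 'abc', None, None]; each None marks len('X') removed
+# characters for _distribute_ranges_and_escape.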
+
+def replace_aspect(
+ orig_function, flag_added_args, *args, **kwargs
+): # type: (Optional[Callable], int, Any, Any) -> TEXT_TYPE
+ if orig_function and (not isinstance(orig_function, BuiltinFunctionType) or not args):
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+
+ candidate_text = args[0]
+ args = args[flag_added_args:]
+ orig_result = candidate_text.replace(*args, **kwargs)
+ if not isinstance(candidate_text, TEXT_TYPES):
+ return orig_result
+
+ ###
+ # Optimization: if we're not going to replace, just return the original string
+ count = parse_params(2, "count", -1, *args, **kwargs)
+ if count == 0:
+ return candidate_text
+ ###
+ try:
+ old_value = parse_params(0, "old_value", None, *args, **kwargs)
+ new_value = parse_params(1, "new_value", None, *args, **kwargs)
+
+ if old_value is None or new_value is None:
+ return orig_result
+
+ if old_value not in candidate_text or old_value == new_value:
+ return candidate_text
+
+ if orig_result in ("", b"", bytearray(b"")):
+ return orig_result
+
+ if count < -1:
+ count = -1
+
+ aspect_result = aspect_replace_api(candidate_text, old_value, new_value, count, orig_result)
+
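+        # Sanity check: propagation must not change the visible value; if it
+        # does, fall back to the untainted original result.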
+ if aspect_result != orig_result:
+ return orig_result
+
+ return aspect_result
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. replace_aspect. {}".format(e))
+ return orig_result
+
+
+def swapcase_aspect(
+ orig_function, flag_added_args, *args, **kwargs
+): # type: (Optional[Callable], int, Any, Any) -> TEXT_TYPE
+ if orig_function and (not isinstance(orig_function, BuiltinFunctionType) or not args):
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+
+ candidate_text = args[0]
+ args = args[flag_added_args:]
+ if not isinstance(candidate_text, TEXT_TYPES):
+ return candidate_text.swapcase(*args, **kwargs)
+ try:
+ return common_replace("swapcase", candidate_text, *args, **kwargs)
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. swapcase_aspect. {}".format(e))
+ return candidate_text.swapcase(*args, **kwargs)
+
+
+def title_aspect(
+ orig_function, flag_added_args, *args, **kwargs
+): # type: (Optional[Callable], int, Any, Any) -> TEXT_TYPE
+ if orig_function and (not isinstance(orig_function, BuiltinFunctionType) or not args):
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+
+ candidate_text = args[0]
+ args = args[flag_added_args:]
+ if not isinstance(candidate_text, TEXT_TYPES):
+ return candidate_text.title(*args, **kwargs)
+ try:
+ return common_replace("title", candidate_text, *args, **kwargs)
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. title_aspect. {}".format(e))
+ return candidate_text.title(*args, **kwargs)
+
+
+def capitalize_aspect(
+ orig_function, flag_added_args, *args, **kwargs
+): # type: (Optional[Callable], int, Any, Any) -> TEXT_TYPE
+ if orig_function and (not isinstance(orig_function, BuiltinFunctionType) or not args):
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+
+ candidate_text = args[0]
+ args = args[flag_added_args:]
+ if not isinstance(candidate_text, TEXT_TYPES):
+ return candidate_text.capitalize(*args, **kwargs)
+
+ try:
+ return common_replace("capitalize", candidate_text, *args, **kwargs)
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. capitalize_aspect. {}".format(e))
+ return candidate_text.capitalize(*args, **kwargs)
+
+
+def casefold_aspect(
+ orig_function, flag_added_args, *args, **kwargs
+): # type: (Optional[Callable], int, Any, Any) -> TEXT_TYPE
+ if orig_function:
+ if not isinstance(orig_function, BuiltinFunctionType) or not args:
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+ else:
+ orig_function = getattr(args[0], "casefold", None)
+
+ if orig_function and orig_function.__qualname__ not in ("str.casefold", "bytes.casefold", "bytearray.casefold"):
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+
+ candidate_text = args[0]
+ args = args[flag_added_args:]
+    if not isinstance(candidate_text, TEXT_TYPES):
+        return candidate_text.casefold(*args, **kwargs)
+ try:
+ return common_replace("casefold", candidate_text, *args, **kwargs)
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. casefold_aspect. {}".format(e))
+ return candidate_text.casefold(*args, **kwargs) # type: ignore[union-attr]
+
+
+def translate_aspect(
+ orig_function, flag_added_args, *args, **kwargs
+): # type: (Optional[Callable], int, Any, Any) -> TEXT_TYPE
+ if orig_function and (not isinstance(orig_function, BuiltinFunctionType) or not args):
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+ return orig_function(*args, **kwargs)
+
+ candidate_text = args[0]
+ args = args[flag_added_args:]
+ if not isinstance(candidate_text, TEXT_TYPES):
+ return candidate_text.translate(*args, **kwargs)
+ try:
+ return common_replace("translate", candidate_text, *args, **kwargs)
+ except Exception as e:
+ _set_iast_error_metric("IAST propagation error. translate_aspect. {}".format(e))
+ return candidate_text.translate(*args, **kwargs)
+
+
+def empty_func(*args, **kwargs):
+ pass
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/clean.sh b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/clean.sh
new file mode 100644
index 0000000..c86b825
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_tracking/clean.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -exu
+cd -- "$(dirname -- "${BASH_SOURCE[0]}")" || exit
+
+rm -rf CMakeFiles/ CMakeCache.txt Makefile cmake_install.cmake __pycache__/ .cmake *.cbp Testing
+rm -rf cmake-build-debug cmake-build-default cmake-build-tests
\ No newline at end of file
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_utils.py
new file mode 100644
index 0000000..5beb9c4
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_taint_utils.py
@@ -0,0 +1,548 @@
+#!/usr/bin/env python3
+from collections import abc
+from typing import Any
+from typing import List
+from typing import Optional
+from typing import Union
+
+from ddtrace.internal.logger import get_logger
+from ddtrace.settings.asm import config as asm_config
+
+
+DBAPI_INTEGRATIONS = ("sqlite", "psycopg", "mysql", "mariadb")
+DBAPI_PREFIXES = ("django-",)
+
+log = get_logger(__name__)
+
+
+# Non Lazy Tainting
+
+
+# don't use dataclasses here: they can create circular import problems
+# @dataclasses.dataclass
+class _DeepTaintCommand:
+ def __init__(
+ self,
+ pre: bool,
+ source_key: str,
+ obj: Any,
+ store_struct: Union[list, dict],
+ key: Optional[List[str]] = None,
+ struct: Optional[Union[list, dict]] = None,
+ is_key: bool = False,
+ ):
+ self.pre = pre
+ self.source_key = source_key
+ self.obj = obj
+ self.store_struct = store_struct
+ self.key = key
+ self.struct = struct
+ self.is_key = is_key
+
+ def store(self, value):
+ if isinstance(self.store_struct, list):
+ self.store_struct.append(value)
+ elif isinstance(self.store_struct, dict):
+ key = self.key[0] if self.key else None
+ self.store_struct[key] = value
+ else:
+ raise ValueError(f"store_struct of type {type(self.store_struct)}")
+
+ def post(self, struct):
+ return self.__class__(False, self.source_key, self.obj, self.store_struct, self.key, struct)
+
+
+def build_new_tainted_object_from_generic_object(initial_object, wanted_object):
+ if initial_object.__class__ is wanted_object.__class__:
+ return wanted_object
+    #### custom-tailored actions
+ wanted_type = initial_object.__class__.__module__, initial_object.__class__.__name__
+ if wanted_type == ("builtins", "tuple"):
+ return tuple(wanted_object)
+ # Django
+ if wanted_type == ("django.http.request", "HttpHeaders"):
+ res = initial_object.__class__({})
+ res._store = {k.lower(): (k, v) for k, v in wanted_object.items()}
+ return res
+ if wanted_type == ("django.http.request", "QueryDict"):
+ res = initial_object.__class__()
+ for k, v in wanted_object.items():
+ dict.__setitem__(res, k, v)
+ return res
+ # Flask 2+
+ if wanted_type == ("werkzeug.datastructures.structures", "ImmutableMultiDict"):
+ return initial_object.__class__(wanted_object)
+ # Flask 1
+ if wanted_type == ("werkzeug.datastructures", "ImmutableMultiDict"):
+ return initial_object.__class__(wanted_object)
+
+    # if the class is unknown, return the initial object:
+    # this may prevent interned strings from being tainted, but it ensures
+    # that the normal behavior of the code is not changed.
+ return initial_object
+
+
+def taint_structure(main_obj, source_key, source_value, override_pyobject_tainted=False):
+ """taint any structured object
+ use a queue like mechanism to avoid recursion
+ Best effort: mutate mutable structures and rebuild immutable ones if possible
+ """
+ from ._taint_tracking import is_pyobject_tainted
+ from ._taint_tracking import taint_pyobject
+
+ if not main_obj:
+ return main_obj
+
+ main_res = []
+ try:
+        # each stack entry is a _DeepTaintCommand(pre/post flag, source key,
+        # object to taint, struct to store the result in, optional key,
+        # processed struct, is_key flag)
+ stack = [_DeepTaintCommand(True, source_key, main_obj, main_res)]
+ while stack:
+ command = stack.pop()
+ if command.pre: # first processing of the object
+ if not command.obj:
+ command.store(command.obj)
+ elif isinstance(command.obj, (str, bytes, bytearray)):
+ if override_pyobject_tainted or not is_pyobject_tainted(command.obj):
+ new_obj = taint_pyobject(
+ pyobject=command.obj,
+ source_name=command.source_key,
+ source_value=command.obj,
+ source_origin=source_key if command.is_key else source_value,
+ )
+ command.store(new_obj)
+ else:
+ command.store(command.obj)
+ elif isinstance(command.obj, abc.Mapping):
+ res = {}
+ stack.append(command.post(res))
+                # use dict's fundamental enumeration when possible to bypass
+                # any overrides in custom classes
+ iterable = dict.items(command.obj) if isinstance(command.obj, dict) else command.obj.items()
+ todo = []
+ for k, v in list(iterable):
+ key_store = []
+ todo.append(_DeepTaintCommand(True, k, k, key_store, is_key=True))
+ todo.append(_DeepTaintCommand(True, k, v, res, key_store))
+ stack.extend(reversed(todo))
+ elif isinstance(command.obj, abc.Sequence):
+ res = []
+ stack.append(command.post(res))
+ todo = [_DeepTaintCommand(True, command.source_key, v, res) for v in command.obj]
+ stack.extend(reversed(todo))
+ else:
+ command.store(command.obj)
+ else:
+ command.store(build_new_tainted_object_from_generic_object(command.obj, command.struct))
+    except BaseException:
+        log.debug("taint_structure error", exc_info=True)
+ finally:
+ return main_res[0] if main_res else main_obj
+
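+# Example usage (illustrative; the OriginType members stand in for the real
+# origin markers):
+#   body = {"user": "alice", "ids": [1, "abc"]}
+#   tainted = taint_structure(body, OriginType.PARAMETER_NAME, OriginType.BODY)
+# returns an equivalent dict whose str/bytes/bytearray keys and values are
+# tainted, while other scalars (e.g. the int 1) pass through unchanged.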
+
+# Lazy Tainting
+
+
+def _is_tainted_struct(obj):
+ return hasattr(obj, "_origins")
+
+
+class LazyTaintList:
+ """
+    Encapsulates a list to lazily taint all of its content, at any depth.
+    It appears and acts like the original list, except for some additional private fields.
+ """
+
+ def __init__(self, original_list, origins=(0, 0), override_pyobject_tainted=False, source_name="[]"):
+ self._obj = original_list._obj if _is_tainted_struct(original_list) else original_list
+ self._origins = origins
+ self._origin_value = origins[1]
+ self._override_pyobject_tainted = override_pyobject_tainted
+ self._source_name = source_name
+
+ def _taint(self, value):
+ if value:
+ if isinstance(value, (str, bytes, bytearray)):
+ from ._taint_tracking import is_pyobject_tainted
+ from ._taint_tracking import taint_pyobject
+
+ if not is_pyobject_tainted(value) or self._override_pyobject_tainted:
+ try:
+ # TODO: migrate this part to shift ranges instead of creating a new one
+ value = taint_pyobject(
+ pyobject=value,
+ source_name=self._source_name,
+ source_value=value,
+ source_origin=self._origin_value,
+ )
+ except SystemError:
+ # TODO: Find the root cause for
+ # SystemError: NULL object passed to Py_BuildValue
+ log.debug("IAST SystemError while tainting value: %s", value, exc_info=True)
+ except Exception:
+ log.debug("IAST Unexpected exception while tainting value", exc_info=True)
+ elif isinstance(value, abc.Mapping) and not _is_tainted_struct(value):
+ value = LazyTaintDict(
+ value, origins=self._origins, override_pyobject_tainted=self._override_pyobject_tainted
+ )
+ elif isinstance(value, abc.Sequence) and not _is_tainted_struct(value):
+ value = LazyTaintList(
+ value,
+ origins=self._origins,
+ override_pyobject_tainted=self._override_pyobject_tainted,
+ source_name=self._source_name,
+ )
+ return value
+
+ def __add__(self, other):
+ if _is_tainted_struct(other):
+ other = other._obj
+ return LazyTaintList(
+ self._obj + other,
+ origins=self._origins,
+ override_pyobject_tainted=self._override_pyobject_tainted,
+ source_name=self._source_name,
+ )
+
+ @property # type: ignore
+ def __class__(self):
+ return list
+
+ def __contains__(self, item):
+ return item in self._obj
+
+ def __delitem__(self, key):
+ del self._obj[key]
+
+ def __eq__(self, other):
+ if _is_tainted_struct(other):
+ other = other._obj
+ return self._obj == other
+
+ def __ge__(self, other):
+ if _is_tainted_struct(other):
+ other = other._obj
+ return self._obj >= other
+
+ def __getitem__(self, key):
+ return self._taint(self._obj[key])
+
+ def __gt__(self, other):
+ if _is_tainted_struct(other):
+ other = other._obj
+ return self._obj > other
+
+ def __iadd__(self, other):
+ if _is_tainted_struct(other):
+ other = other._obj
+        self._obj += other
+        return self
+
+ def __imul__(self, other):
+        self._obj *= other
+        return self
+
+ def __iter__(self):
+ return (self[i] for i in range(len(self._obj)))
+
+ def __le__(self, other):
+ if _is_tainted_struct(other):
+ other = other._obj
+ return self._obj <= other
+
+ def __len__(self):
+ return len(self._obj)
+
+ def __lt__(self, other):
+ if _is_tainted_struct(other):
+ other = other._obj
+ return self._obj < other
+
+ def __mul__(self, other):
+ return LazyTaintList(
+ self._obj * other,
+ origins=self._origins,
+ override_pyobject_tainted=self._override_pyobject_tainted,
+ source_name=self._source_name,
+ )
+
+ def __ne__(self, other):
+ if _is_tainted_struct(other):
+ other = other._obj
+ return self._obj != other
+
+ def __repr__(self):
+ return repr(self._obj)
+
+ def __reversed__(self):
+ return (self[i] for i in reversed(range(len(self._obj))))
+
+ def __setitem__(self, key, value):
+ self._obj[key] = value
+
+ def __str__(self):
+ return str(self._obj)
+
+ def append(self, item):
+ self._obj.append(item)
+
+ def clear(self):
+ # TODO: stop tainting in this case
+ self._obj.clear()
+
+ def copy(self):
+ return LazyTaintList(
+ self._obj.copy(),
+ origins=self._origins,
+ override_pyobject_tainted=self._override_pyobject_tainted,
+ source_name=self._source_name,
+ )
+
+ def count(self, *args):
+ return self._obj.count(*args)
+
+ def extend(self, *args):
+ return self._obj.extend(*args)
+
+ def index(self, *args):
+ return self._obj.index(*args)
+
+ def insert(self, *args):
+ return self._obj.insert(*args)
+
+ def pop(self, *args):
+ return self._taint(self._obj.pop(*args))
+
+ def remove(self, *args):
+ return self._obj.remove(*args)
+
+ def reverse(self, *args):
+ return self._obj.reverse(*args)
+
+ def sort(self, *args):
+ return self._obj.sort(*args)
+
+ # psycopg2 support
+ def __conform__(self, proto):
+ return self
+
+ def getquoted(self) -> bytes:
+ import psycopg2.extensions as ext
+
+ value = ext.adapt(self._obj).getquoted()
+ value = self._taint(value)
+ return value
+
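+# Illustrative behavior of the lazy wrappers: values are tainted on read,
+# not when the wrapper is built:
+#   ld = LazyTaintList(["cmd"], origins=(origin_key, origin_value))
+#   ld[0]  # returns a tainted copy of "cmd"; the wrapped list is untouched
+# (origin_key and origin_value stand in for the real origin markers)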
+
+class LazyTaintDict:
+ def __init__(self, original_dict, origins=(0, 0), override_pyobject_tainted=False):
+ self._obj = original_dict
+ self._origins = origins
+ self._origin_key = origins[0]
+ self._origin_value = origins[1]
+ self._override_pyobject_tainted = override_pyobject_tainted
+
+ def _taint(self, value, key, origin=None):
+ if origin is None:
+ origin = self._origin_value
+ if value:
+ if isinstance(value, (str, bytes, bytearray)):
+ from ._taint_tracking import is_pyobject_tainted
+ from ._taint_tracking import taint_pyobject
+
+ if not is_pyobject_tainted(value) or self._override_pyobject_tainted:
+ try:
+ # TODO: migrate this part to shift ranges instead of creating a new one
+ value = taint_pyobject(
+ pyobject=value,
+ source_name=key,
+ source_value=value,
+ source_origin=origin,
+ )
+ except SystemError:
+ # TODO: Find the root cause for
+ # SystemError: NULL object passed to Py_BuildValue
+ log.debug("IAST SystemError while tainting value: %s", value, exc_info=True)
+ except Exception:
+ log.debug("IAST Unexpected exception while tainting value", exc_info=True)
+ elif isinstance(value, abc.Mapping) and not _is_tainted_struct(value):
+ value = LazyTaintDict(
+ value, origins=self._origins, override_pyobject_tainted=self._override_pyobject_tainted
+ )
+ elif isinstance(value, abc.Sequence) and not _is_tainted_struct(value):
+ value = LazyTaintList(
+ value,
+ origins=self._origins,
+ override_pyobject_tainted=self._override_pyobject_tainted,
+ source_name=key,
+ )
+ return value
+
+ @property # type: ignore
+ def __class__(self):
+ return dict
+
+ def __contains__(self, item):
+ return item in self._obj
+
+ def __delitem__(self, key):
+ del self._obj[key]
+
+ def __eq__(self, other):
+ if _is_tainted_struct(other):
+ other = other._obj
+ return self._obj == other
+
+ def __ge__(self, other):
+ if _is_tainted_struct(other):
+ other = other._obj
+ return self._obj >= other
+
+ def __getitem__(self, key):
+ return self._taint(self._obj[key], key)
+
+ def __gt__(self, other):
+ if _is_tainted_struct(other):
+ other = other._obj
+ return self._obj > other
+
+ def __ior__(self, other):
+ if _is_tainted_struct(other):
+ other = other._obj
+        self._obj |= other
+        return self
+
+ def __iter__(self):
+ return iter(self.keys())
+
+ def __le__(self, other):
+ if _is_tainted_struct(other):
+ other = other._obj
+ return self._obj <= other
+
+ def __len__(self):
+ return len(self._obj)
+
+ def __lt__(self, other):
+ if _is_tainted_struct(other):
+ other = other._obj
+ return self._obj < other
+
+ def __ne__(self, other):
+ if _is_tainted_struct(other):
+ other = other._obj
+ return self._obj != other
+
+ def __or__(self, other):
+ if _is_tainted_struct(other):
+ other = other._obj
+ return LazyTaintDict(
+ self._obj | other,
+ origins=self._origins,
+ override_pyobject_tainted=self._override_pyobject_tainted,
+ )
+
+ def __repr__(self):
+ return repr(self._obj)
+
+ def __reversed__(self):
+ return reversed(self.keys())
+
+ def __setitem__(self, key, value):
+ self._obj[key] = value
+
+ def __str__(self):
+ return str(self._obj)
+
+ def clear(self):
+ # TODO: stop tainting in this case
+ self._obj.clear()
+
+ def copy(self):
+ return LazyTaintDict(
+ self._obj.copy(),
+ origins=self._origins,
+ override_pyobject_tainted=self._override_pyobject_tainted,
+ )
+
+ @classmethod
+ def fromkeys(cls, *args):
+ return dict.fromkeys(*args)
+
+ def get(self, key, default=None):
+ observer = object()
+ res = self._obj.get(key, observer)
+ if res is observer:
+ return default
+ return self._taint(res, key)
+
+ def items(self):
+ for k in self.keys():
+ yield (k, self[k])
+
+ def keys(self):
+ for k in self._obj.keys():
+ yield self._taint(k, k, self._origin_key)
+
+ def pop(self, *args):
+ return self._taint(self._obj.pop(*args), "pop")
+
+ def popitem(self):
+ k, v = self._obj.popitem()
+ return self._taint(k, k), self._taint(v, k)
+
+ def remove(self, *args):
+ return self._obj.remove(*args)
+
+ def setdefault(self, *args):
+ return self._taint(self._obj.setdefault(*args), args[0])
+
+ def update(self, *args, **kargs):
+ self._obj.update(*args, **kargs)
+
+ def values(self):
+ for _, v in self.items():
+ yield v
+
+ # Django Query Dict support
+ def getlist(self, key, default=None):
+ return self._taint(self._obj.getlist(key, default=default), key)
+
+ def setlist(self, key, list_):
+ self._obj.setlist(key, list_)
+
+ def appendlist(self, key, item):
+ self._obj.appendlist(key, item)
+
+ def setlistdefault(self, key, default_list=None):
+ return self._taint(self._obj.setlistdefault(key, default_list=default_list), key)
+
+ def lists(self):
+ return self._taint(self._obj.lists(), self._origin_value)
+
+ def dict(self):
+ return self
+
+ def urlencode(self, safe=None):
+ return self._taint(self._obj.urlencode(safe=safe), self._origin_value)
+
+
+def supported_dbapi_integration(integration_name):
+ return integration_name in DBAPI_INTEGRATIONS or integration_name.startswith(DBAPI_PREFIXES)
+
+
+def check_tainted_args(args, kwargs, tracer, integration_name, method):
+ if supported_dbapi_integration(integration_name) and method.__name__ == "execute":
+ from ._taint_tracking import is_pyobject_tainted
+
+ return len(args) and args[0] and is_pyobject_tainted(args[0])
+
+ return False
+
+
+if asm_config._iast_lazy_taint:
+ # redefining taint_structure to use lazy object if required
+
+    def taint_structure(main_obj, source_key, source_value, override_pyobject_tainted=False):  # noqa: F811
+        if isinstance(main_obj, abc.Mapping):
+            return LazyTaintDict(
+                main_obj, origins=(source_key, source_value), override_pyobject_tainted=override_pyobject_tainted
+            )
+        elif isinstance(main_obj, abc.Sequence):
+            return LazyTaintList(
+                main_obj, origins=(source_key, source_value), override_pyobject_tainted=override_pyobject_tainted
+            )
+        return main_obj
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_utils.py
new file mode 100644
index 0000000..3f994b8
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/_utils.py
@@ -0,0 +1,124 @@
+import json
+import re
+import string
+import sys
+from typing import TYPE_CHECKING # noqa:F401
+
+import attr
+
+from ddtrace.internal.logger import get_logger
+from ddtrace.settings.asm import config as asm_config
+
+
+if TYPE_CHECKING:
+ from typing import Any # noqa:F401
+ from typing import List # noqa:F401
+ from typing import Set # noqa:F401
+ from typing import Tuple # noqa:F401
+
+
+def _is_python_version_supported(): # type: () -> bool
+ # IAST supports Python versions 3.6 to 3.12
+ return (3, 6, 0) <= sys.version_info < (3, 13, 0)
+
+
+def _is_iast_enabled():
+ if not asm_config._iast_enabled:
+ return False
+
+ if not _is_python_version_supported():
+ log = get_logger(__name__)
+ log.info("IAST is not compatible with the current Python version")
+ return False
+
+ return True
+
+
+# Used to cache the compiled regular expression
+_SOURCE_NAME_SCRUB = None
+_SOURCE_VALUE_SCRUB = None
+
+
+def _has_to_scrub(s): # type: (str) -> bool
+ global _SOURCE_NAME_SCRUB
+ global _SOURCE_VALUE_SCRUB
+
+ if _SOURCE_NAME_SCRUB is None:
+ _SOURCE_NAME_SCRUB = re.compile(asm_config._iast_redaction_name_pattern)
+ _SOURCE_VALUE_SCRUB = re.compile(asm_config._iast_redaction_value_pattern)
+
+ return _SOURCE_NAME_SCRUB.match(s) is not None or _SOURCE_VALUE_SCRUB.match(s) is not None
+
+
+_REPLACEMENTS = string.ascii_letters
+_LEN_REPLACEMENTS = len(_REPLACEMENTS)
+
+
+def _scrub(s, has_range=False): # type: (str, bool) -> str
+ if has_range:
+ return "".join([_REPLACEMENTS[i % _LEN_REPLACEMENTS] for i in range(len(s))])
+ return "*" * len(s)
+
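+# e.g. _scrub("secret") == "******", while _scrub("secret", has_range=True)
+# == "abcdef", cycling deterministically through string.ascii_letters.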
+
+def _is_evidence_value_parts(value): # type: (Any) -> bool
+ return isinstance(value, (set, list))
+
+
+def _scrub_get_tokens_positions(text, tokens):
+ # type: (str, Set[str]) -> List[Tuple[int, int]]
+ token_positions = []
+
+ for token in tokens:
+ position = text.find(token)
+ if position != -1:
+ token_positions.append((position, position + len(token)))
+
+ token_positions.sort()
+ return token_positions
+
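+# e.g. _scrub_get_tokens_positions("SELECT x FROM t", {"x", "t"}) returns
+# [(7, 8), (14, 15)]: the start/end offsets of each token, sorted.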
+
+def _iast_report_to_str(data):
+ from ._taint_tracking import OriginType
+ from ._taint_tracking import origin_to_str
+
+ class OriginTypeEncoder(json.JSONEncoder):
+ def default(self, obj):
+ if isinstance(obj, OriginType):
+                # serialize OriginType values via their string representation
+ return origin_to_str(obj)
+ return json.JSONEncoder.default(self, obj)
+
+ return json.dumps(attr.asdict(data, filter=lambda attr, x: x is not None), cls=OriginTypeEncoder)
+
+
+def _get_patched_code(module_path, module_name): # type: (str, str) -> str
+ """
+    Return the patched source code of a module, for debugging purposes.
+ """
+ import astunparse
+
+ from ddtrace.appsec._iast._ast.ast_patching import get_encoding
+ from ddtrace.appsec._iast._ast.ast_patching import visit_ast
+
+ with open(module_path, "r", encoding=get_encoding(module_path)) as source_file:
+ source_text = source_file.read()
+
+ new_source = visit_ast(
+ source_text,
+ module_path,
+ module_name=module_name,
+ )
+
+ # If no modifications are done,
+ # visit_ast returns None
+ if not new_source:
+ return ""
+
+ new_code = astunparse.unparse(new_source)
+ return new_code
+
+
+if __name__ == "__main__":
+ MODULE_PATH = sys.argv[1]
+ MODULE_NAME = sys.argv[2]
+ print(_get_patched_code(MODULE_PATH, MODULE_NAME))
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/constants.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/constants.py
new file mode 100644
index 0000000..bd9e739
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/constants.py
@@ -0,0 +1,89 @@
+from typing import Any
+from typing import Dict
+
+
+VULN_INSECURE_HASHING_TYPE = "WEAK_HASH"
+VULN_WEAK_CIPHER_TYPE = "WEAK_CIPHER"
+VULN_SQL_INJECTION = "SQL_INJECTION"
+VULN_PATH_TRAVERSAL = "PATH_TRAVERSAL"
+VULN_WEAK_RANDOMNESS = "WEAK_RANDOMNESS"
+VULN_INSECURE_COOKIE = "INSECURE_COOKIE"
+VULN_NO_HTTPONLY_COOKIE = "NO_HTTPONLY_COOKIE"
+VULN_NO_SAMESITE_COOKIE = "NO_SAMESITE_COOKIE"
+VULN_CMDI = "COMMAND_INJECTION"
+VULN_SSRF = "SSRF"
+
+VULNERABILITY_TOKEN_TYPE = Dict[int, Dict[str, Any]]
+
+EVIDENCE_ALGORITHM_TYPE = "ALGORITHM"
+EVIDENCE_SQL_INJECTION = "SQL_INJECTION"
+EVIDENCE_PATH_TRAVERSAL = "PATH_TRAVERSAL"
+EVIDENCE_WEAK_RANDOMNESS = "WEAK_RANDOMNESS"
+EVIDENCE_COOKIE = "COOKIE"
+EVIDENCE_CMDI = "COMMAND"
+EVIDENCE_SSRF = "SSRF"
+
+MD5_DEF = "md5"
+SHA1_DEF = "sha1"
+
+DES_DEF = "des"
+BLOWFISH_DEF = "blowfish"
+RC2_DEF = "rc2"
+RC4_DEF = "rc4"
+IDEA_DEF = "idea"
+
+DD_IAST_TELEMETRY_VERBOSITY = "DD_IAST_TELEMETRY_VERBOSITY"
+
+DEFAULT_WEAK_HASH_ALGORITHMS = {MD5_DEF, SHA1_DEF}
+
+DEFAULT_WEAK_CIPHER_ALGORITHMS = {DES_DEF, BLOWFISH_DEF, RC2_DEF, RC4_DEF, IDEA_DEF}
+
+DEFAULT_WEAK_RANDOMNESS_FUNCTIONS = {
+ "random",
+ "randint",
+ "randrange",
+ "choice",
+ "shuffle",
+ "betavariate",
+ "gammavariate",
+ "expovariate",
+ "choices",
+ "gauss",
+ "uniform",
+ "lognormvariate",
+ "normalvariate",
+ "paretovariate",
+ "sample",
+ "triangular",
+ "vonmisesvariate",
+ "weibullvariate",
+ "randbytes",
+}
+
+DEFAULT_PATH_TRAVERSAL_FUNCTIONS = {
+ "glob": {"glob"},
+ "os": {
+ "mkdir",
+ "remove",
+ "rename",
+ "rmdir",
+ "listdir",
+ },
+ "pickle": {"load"},
+ "_pickle": {"load"},
+ "posix": {
+ "mkdir",
+ "remove",
+ "rename",
+ "rmdir",
+ "listdir",
+ },
+ "shutil": {
+ "copy",
+ "copytree",
+ "move",
+ "rmtree",
+ },
+ "tarfile": {"open"},
+ "zipfile": {"ZipFile"},
+}
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/processor.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/processor.py
new file mode 100644
index 0000000..4109b8e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/processor.py
@@ -0,0 +1,95 @@
+from typing import TYPE_CHECKING
+
+import attr
+
+from ddtrace.appsec._constants import APPSEC
+from ddtrace.appsec._constants import IAST
+from ddtrace.constants import ORIGIN_KEY
+from ddtrace.ext import SpanTypes
+from ddtrace.internal import core
+from ddtrace.internal.logger import get_logger
+from ddtrace.internal.processor import SpanProcessor
+
+from .._trace_utils import _asm_manual_keep
+from . import oce
+from ._metrics import _set_metric_iast_request_tainted
+from ._metrics import _set_span_tag_iast_executed_sink
+from ._metrics import _set_span_tag_iast_request_tainted
+from ._utils import _iast_report_to_str
+from ._utils import _is_iast_enabled
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from typing import Optional # noqa:F401
+
+ from ddtrace.span import Span # noqa:F401
+
+log = get_logger(__name__)
+
+
+@attr.s(eq=False)
+class AppSecIastSpanProcessor(SpanProcessor):
+ @staticmethod
+ def is_span_analyzed(span=None):
+ # type: (Optional[Span]) -> bool
+ if span is None:
+ from ddtrace import tracer
+
+ span = tracer.current_root_span()
+
+ if span and span.span_type == SpanTypes.WEB and core.get_item(IAST.REQUEST_IAST_ENABLED, span=span):
+ return True
+ return False
+
+ def on_span_start(self, span):
+ # type: (Span) -> None
+ if span.span_type != SpanTypes.WEB:
+ return
+
+ if not _is_iast_enabled():
+ return
+
+ request_iast_enabled = False
+ if oce.acquire_request(span):
+ from ._taint_tracking import create_context
+
+ request_iast_enabled = True
+ create_context()
+
+ core.set_item(IAST.REQUEST_IAST_ENABLED, request_iast_enabled, span=span)
+
+ def on_span_finish(self, span):
+ # type: (Span) -> None
+ """Report reported vulnerabilities.
+
+ Span Tags:
+ - `_dd.iast.json`: Only when one or more vulnerabilities have been detected will we include the custom tag.
+ - `_dd.iast.enabled`: Set to 1 when IAST is enabled in a request. If a request is disabled
+ (e.g. by sampling), then it is not set.
+ """
+ if span.span_type != SpanTypes.WEB:
+ return
+
+ if not core.get_item(IAST.REQUEST_IAST_ENABLED, span=span):
+ span.set_metric(IAST.ENABLED, 0.0)
+ return
+
+ from ._taint_tracking import reset_context # noqa: F401
+
+ span.set_metric(IAST.ENABLED, 1.0)
+
+ data = core.get_item(IAST.CONTEXT_KEY, span=span)
+
+ if data:
+ span.set_tag_str(IAST.JSON, _iast_report_to_str(data))
+ _asm_manual_keep(span)
+
+ _set_metric_iast_request_tainted()
+ _set_span_tag_iast_request_tainted(span)
+ _set_span_tag_iast_executed_sink(span)
+ reset_context()
+
+ if span.get_tag(ORIGIN_KEY) is None:
+ span.set_tag_str(ORIGIN_KEY, APPSEC.ORIGIN_VALUE)
+
+ oce.release_request()
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/reporter.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/reporter.py
new file mode 100644
index 0000000..5a95aa1
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/reporter.py
@@ -0,0 +1,87 @@
+from functools import reduce
+import json
+import operator
+import os
+from typing import TYPE_CHECKING
+from typing import List
+from typing import Set
+import zlib
+
+import attr
+
+
+if TYPE_CHECKING:
+    from typing import Any  # noqa:F401
+    from typing import Dict  # noqa:F401
+    from typing import Optional  # noqa:F401
+
+
+def _only_if_true(value):
+ return value if value else None
+
+
+@attr.s(eq=False, hash=False)
+class Evidence(object):
+ value = attr.ib(type=str, default=None) # type: Optional[str]
+ pattern = attr.ib(type=str, default=None) # type: Optional[str]
+ valueParts = attr.ib(type=list, default=None) # type: Optional[List[Dict[str, Any]]]
+ redacted = attr.ib(type=bool, default=False, converter=_only_if_true) # type: bool
+
+ def _valueParts_hash(self):
+ if not self.valueParts:
+ return
+
+ _hash = 0
+ for part in self.valueParts:
+ json_str = json.dumps(part, sort_keys=True)
+ part_hash = zlib.crc32(json_str.encode())
+ _hash ^= part_hash
+
+ return _hash
+
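+    # Note: XOR makes the hash independent of valueParts ordering, so two
+    # Evidence objects with the same parts in a different order hash equally.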
+ def __hash__(self):
+ return hash((self.value, self.pattern, self._valueParts_hash(), self.redacted))
+
+ def __eq__(self, other):
+ return (
+ self.value == other.value
+ and self.pattern == other.pattern
+ and self._valueParts_hash() == other._valueParts_hash()
+ and self.redacted == other.redacted
+ )
+
+
+@attr.s(eq=True, hash=True)
+class Location(object):
+ spanId = attr.ib(type=int, eq=False, hash=False, repr=False) # type: int
+ path = attr.ib(type=str, default=None) # type: Optional[str]
+ line = attr.ib(type=int, default=None) # type: Optional[int]
+
+
+@attr.s(eq=True, hash=True)
+class Vulnerability(object):
+ type = attr.ib(type=str) # type: str
+ evidence = attr.ib(type=Evidence, repr=False) # type: Evidence
+ location = attr.ib(type=Location, hash="PYTEST_CURRENT_TEST" in os.environ) # type: Location
+ hash = attr.ib(init=False, eq=False, hash=False, repr=False) # type: int
+
+ def __attrs_post_init__(self):
+ self.hash = zlib.crc32(repr(self).encode())
+
+
+@attr.s(eq=True, hash=True)
+class Source(object):
+ origin = attr.ib(type=str) # type: str
+ name = attr.ib(type=str) # type: str
+ redacted = attr.ib(type=bool, default=False, converter=_only_if_true) # type: bool
+ value = attr.ib(type=str, default=None) # type: Optional[str]
+ pattern = attr.ib(type=str, default=None) # type: Optional[str]
+
+
+@attr.s(eq=False, hash=False)
+class IastSpanReporter(object):
+ sources = attr.ib(type=List[Source], factory=list) # type: List[Source]
+ vulnerabilities = attr.ib(type=Set[Vulnerability], factory=set) # type: Set[Vulnerability]
+
+ def __hash__(self):
+ return reduce(operator.xor, (hash(obj) for obj in set(self.sources) | self.vulnerabilities))
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/__init__.py
new file mode 100644
index 0000000..e7c8787
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/__init__.py
@@ -0,0 +1,8 @@
+from .ast_taint import ast_function
+from .path_traversal import open_path_traversal
+
+
+__all__ = [
+ "open_path_traversal",
+ "ast_function",
+]
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/_base.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/_base.py
new file mode 100644
index 0000000..8327bd8
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/_base.py
@@ -0,0 +1,314 @@
+import os
+import time
+from typing import TYPE_CHECKING # noqa:F401
+from typing import cast # noqa:F401
+
+from ddtrace import tracer
+from ddtrace.appsec._constants import IAST
+from ddtrace.internal import core
+from ddtrace.internal.logger import get_logger
+from ddtrace.internal.utils.cache import LFUCache
+from ddtrace.settings.asm import config as asm_config
+
+from ..._deduplications import deduplication
+from .._overhead_control_engine import Operation
+from .._stacktrace import get_info_frame
+from .._utils import _has_to_scrub
+from .._utils import _is_evidence_value_parts
+from .._utils import _scrub
+from ..processor import AppSecIastSpanProcessor
+from ..reporter import Evidence
+from ..reporter import IastSpanReporter
+from ..reporter import Location
+from ..reporter import Source
+from ..reporter import Vulnerability
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from typing import Any # noqa:F401
+ from typing import Callable # noqa:F401
+ from typing import Dict # noqa:F401
+ from typing import List # noqa:F401
+ from typing import Optional # noqa:F401
+ from typing import Set # noqa:F401
+ from typing import Text # noqa:F401
+ from typing import Union # noqa:F401
+
+log = get_logger(__name__)
+
+CWD = os.path.abspath(os.getcwd())
+
+
+class taint_sink_deduplication(deduplication):
+ def __call__(self, *args, **kwargs):
+        # positions 0, 1 and the last one are skipped when hashing the
+        # arguments, since they hold the cls, the span and the sources
+ result = None
+ if self.is_deduplication_enabled() is False:
+ result = self.func(*args, **kwargs)
+ else:
+ raw_log_hash = hash("".join([str(arg) for arg in args[2:-1]]))
+ last_reported_timestamp = self.get_last_time_reported(raw_log_hash)
+ if time.time() > last_reported_timestamp:
+ result = self.func(*args, **kwargs)
+ self.reported_logs[raw_log_hash] = time.time() + self._time_lapse
+ return result
+
+
+def _check_positions_contained(needle, container):
+ needle_start, needle_end = needle
+ container_start, container_end = container
+
+ return (
+ (container_start <= needle_start < container_end)
+ or (container_start < needle_end <= container_end)
+ or (needle_start <= container_start < needle_end)
+ or (needle_start < container_end <= needle_end)
+ )
+
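+# e.g. _check_positions_contained((3, 7), (5, 10)) is True (the spans
+# overlap), while _check_positions_contained((0, 2), (5, 10)) is False.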
+
+class VulnerabilityBase(Operation):
+ vulnerability_type = ""
+ evidence_type = ""
+ _redacted_report_cache = LFUCache()
+
+ @classmethod
+ def _reset_cache(cls):
+ cls._redacted_report_cache.clear()
+
+ @classmethod
+ def wrap(cls, func):
+ # type: (Callable) -> Callable
+ def wrapper(wrapped, instance, args, kwargs):
+ # type: (Callable, Any, Any, Any) -> Any
+ """Get the current root Span and attach it to the wrapped function. We need the span to report the
+ vulnerability and update the context with the report information.
+ """
+ if AppSecIastSpanProcessor.is_span_analyzed() and cls.has_quota():
+ return func(wrapped, instance, args, kwargs)
+ else:
+ log.debug("IAST: no vulnerability quota to analyze more sink points")
+ return wrapped(*args, **kwargs)
+
+ return wrapper
+
+ @classmethod
+ @taint_sink_deduplication
+ def _prepare_report(cls, span, vulnerability_type, evidence, file_name, line_number, sources):
+ report = core.get_item(IAST.CONTEXT_KEY, span=span)
+ if report:
+ report.vulnerabilities.add(
+ Vulnerability(
+ type=vulnerability_type,
+ evidence=evidence,
+ location=Location(path=file_name, line=line_number, spanId=span.span_id),
+ )
+ )
+
+ else:
+ report = IastSpanReporter(
+ vulnerabilities={
+ Vulnerability(
+ type=vulnerability_type,
+ evidence=evidence,
+ location=Location(path=file_name, line=line_number, spanId=span.span_id),
+ )
+ }
+ )
+ if sources:
+
+ def cast_value(value):
+ if isinstance(value, (bytes, bytearray)):
+ value_decoded = value.decode("utf-8")
+ else:
+ value_decoded = value
+ return value_decoded
+
+ report.sources = [Source(origin=x.origin, name=x.name, value=cast_value(x.value)) for x in sources]
+
+ redacted_report = cls._redacted_report_cache.get(
+ hash(report), lambda x: cls._redact_report(cast(IastSpanReporter, report))
+ )
+ core.set_item(IAST.CONTEXT_KEY, redacted_report, span=span)
+
+ return True
+
+ @classmethod
+ def report(cls, evidence_value="", sources=None):
+        # type: (Union[Text, List[Dict[str, Any]]], Optional[List[Source]]) -> None
+        """Build an IastSpanReporter instance so the `AppSecIastSpanProcessor` can report it as a JSON string"""
+
+ if cls.acquire_quota():
+ if not tracer or not hasattr(tracer, "current_root_span"):
+ log.debug(
+ "[IAST] VulnerabilityReporter is trying to report an evidence, "
+ "but not tracer or tracer has no root span"
+ )
+ return None
+
+ span = tracer.current_root_span()
+ if not span:
+ log.debug(
+ "[IAST] VulnerabilityReporter. No root span in the current execution. Skipping IAST taint sink."
+ )
+ return None
+
+ file_name = None
+ line_number = None
+
+ skip_location = getattr(cls, "skip_location", False)
+ if not skip_location:
+ frame_info = get_info_frame(CWD)
+ if not frame_info:
+ return None
+
+ file_name, line_number = frame_info
+
+ # Remove CWD prefix
+ if file_name.startswith(CWD):
+ file_name = os.path.relpath(file_name, start=CWD)
+
+ if not cls.is_not_reported(file_name, line_number):
+ return
+
+ if _is_evidence_value_parts(evidence_value):
+ evidence = Evidence(valueParts=evidence_value)
+ # Evidence is a string in weak cipher, weak hash and weak randomness
+ elif isinstance(evidence_value, (str, bytes, bytearray)):
+ evidence = Evidence(value=evidence_value)
+ else:
+ log.debug("Unexpected evidence_value type: %s", type(evidence_value))
+ evidence = Evidence(value="")
+
+ result = cls._prepare_report(span, cls.vulnerability_type, evidence, file_name, line_number, sources)
+            # If result is None, deduplication kicked in and the vulnerability
+            # was not reported, so we need to restore the quota
+ if not result:
+ cls.increment_quota()
+
+ @classmethod
+ def _extract_sensitive_tokens(cls, report):
+ # type: (Dict[Vulnerability, str]) -> Dict[int, Dict[str, Any]]
+ log.debug("Base class VulnerabilityBase._extract_sensitive_tokens called")
+ return {}
+
+ @classmethod
+ def _get_vulnerability_text(cls, vulnerability):
+ if vulnerability and vulnerability.evidence.value is not None:
+ return vulnerability.evidence.value
+
+ if vulnerability.evidence.valueParts is not None:
+ return "".join(
+ [
+ (part.get("value", "") if type(part) is not str else part)
+ for part in vulnerability.evidence.valueParts
+ ]
+ )
+
+ return ""
+
+ @classmethod
+ def replace_tokens(
+ cls,
+ vuln,
+ vulns_to_tokens,
+ has_range=False,
+ ):
+ ret = vuln.evidence.value
+ replaced = False
+
+ for token in vulns_to_tokens[hash(vuln)]["tokens"]:
+ ret = ret.replace(token, _scrub(token, has_range))
+ replaced = True
+
+ return ret, replaced
+
+ @classmethod
+ def _redact_report(cls, report): # type: (IastSpanReporter) -> IastSpanReporter
+ if not asm_config._iast_redaction_enabled:
+ return report
+
+ # See if there is a match on either any of the sources or value parts of the report
+ found = False
+
+ for source in report.sources:
+ # Join them so we only run the regexps once for each source
+ joined_fields = "%s%s" % (source.name, source.value)
+ if _has_to_scrub(joined_fields):
+ found = True
+ break
+
+ vulns_to_text = {}
+
+ if not found:
+ # Check the evidence's value/s
+ for vuln in report.vulnerabilities:
+ vulnerability_text = cls._get_vulnerability_text(vuln)
+ if _has_to_scrub(vulnerability_text):
+ vulns_to_text[vuln] = vulnerability_text
+ found = True
+ break
+
+ if not found:
+ return report
+
+ if not vulns_to_text:
+ vulns_to_text = {vuln: cls._get_vulnerability_text(vuln) for vuln in report.vulnerabilities}
+
+        # If we're here, some potentially sensitive information was found, so we
+        # delegate to the specific subclass the task of extracting the variable
+        # tokens (e.g. literals inside quotes for SQL Injection). Note that a
+        # single potentially sensitive match means all the tokens must be
+        # scrubbed, which is why this is done in two steps instead of one.
+ vulns_to_tokens = cls._extract_sensitive_tokens(vulns_to_text)
+
+ if not vulns_to_tokens:
+ return report
+
+ all_tokens = set() # type: Set[str]
+ for _, value_dict in vulns_to_tokens.items():
+ all_tokens.update(value_dict["tokens"])
+
+ # Iterate over all the sources, if one of the tokens match it, redact it
+ for source in report.sources:
+ if source.name in all_tokens or source.value in all_tokens:
+ source.pattern = _scrub(source.value, has_range=True)
+ source.redacted = True
+ source.value = None
+
+ # Same for all the evidence values
+ for vuln in report.vulnerabilities:
+ # Use the initial hash directly as iteration key since the vuln itself will change
+ vuln_hash = hash(vuln)
+ if vuln.evidence.value is not None:
+ pattern, replaced = cls.replace_tokens(vuln, vulns_to_tokens, hasattr(vuln.evidence.value, "source"))
+ if replaced:
+ vuln.evidence.pattern = pattern
+ vuln.evidence.redacted = True
+ vuln.evidence.value = None
+ elif vuln.evidence.valueParts is not None:
+ idx = 0
+ for part in vuln.evidence.valueParts:
+ value = part["value"]
+ part_len = len(value)
+ part_start = idx
+ part_end = idx + part_len
+ pattern_list = []
+
+ for positions in vulns_to_tokens[vuln_hash]["token_positions"]:
+ if _check_positions_contained(positions, (part_start, part_end)):
+ part_scrub_start = max(positions[0] - idx, 0)
+ part_scrub_end = positions[1] - idx
+ to_scrub = value[part_scrub_start:part_scrub_end]
+ scrubbed = _scrub(to_scrub, "source" in part)
+ pattern_list.append(value[:part_scrub_start] + scrubbed + value[part_scrub_end:])
+ part["redacted"] = True
+ else:
+ pattern_list.append(value[part_start:part_end])
+ continue
+
+ if "redacted" in part:
+ part["pattern"] = "".join(pattern_list)
+ del part["value"]
+
+ idx += part_len
+
+ return report
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/ast_taint.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/ast_taint.py
new file mode 100644
index 0000000..af8f59b
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/ast_taint.py
@@ -0,0 +1,47 @@
+from typing import TYPE_CHECKING # noqa:F401
+
+from ..._constants import IAST_SPAN_TAGS
+from .._metrics import _set_metric_iast_executed_sink
+from .._metrics import increment_iast_span_metric
+from ..constants import DEFAULT_PATH_TRAVERSAL_FUNCTIONS
+from ..constants import DEFAULT_WEAK_RANDOMNESS_FUNCTIONS
+from .path_traversal import check_and_report_path_traversal
+from .weak_randomness import WeakRandomness
+
+
+if TYPE_CHECKING:
+ from typing import Any # noqa:F401
+ from typing import Callable # noqa:F401
+
+
+def ast_function(
+ func, # type: Callable
+ flag_added_args, # type: Any
+ *args, # type: Any
+ **kwargs, # type: Any
+): # type: (...) -> Any
+ instance = getattr(func, "__self__", None)
+ func_name = getattr(func, "__name__", None)
+ cls_name = ""
+ if instance is not None and func_name:
+ try:
+ cls_name = instance.__class__.__name__
+ except AttributeError:
+ pass
+
+ if flag_added_args > 0:
+ args = args[flag_added_args:]
+
+ if (
+ instance.__class__.__module__ == "random"
+ and cls_name == "Random"
+ and func_name in DEFAULT_WEAK_RANDOMNESS_FUNCTIONS
+ ):
+        # Weak randomness function: run the analyzer
+ increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, WeakRandomness.vulnerability_type)
+ _set_metric_iast_executed_sink(WeakRandomness.vulnerability_type)
+ WeakRandomness.report(evidence_value=cls_name + "." + func_name)
+ elif hasattr(func, "__module__") and DEFAULT_PATH_TRAVERSAL_FUNCTIONS.get(func.__module__):
+ if func_name in DEFAULT_PATH_TRAVERSAL_FUNCTIONS[func.__module__]:
+ check_and_report_path_traversal(*args, **kwargs)
+ return func(*args, **kwargs)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/command_injection.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/command_injection.py
new file mode 100644
index 0000000..b792adc
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/command_injection.py
@@ -0,0 +1,254 @@
+import os
+import re
+import subprocess # nosec
+from typing import TYPE_CHECKING # noqa:F401
+from typing import List # noqa:F401
+from typing import Set # noqa:F401
+from typing import Union # noqa:F401
+
+from ddtrace.contrib import trace_utils
+from ddtrace.internal.logger import get_logger
+from ddtrace.settings.asm import config as asm_config
+
+from ..._constants import IAST_SPAN_TAGS
+from .. import oce
+from .._metrics import increment_iast_span_metric
+from .._utils import _has_to_scrub
+from .._utils import _scrub
+from .._utils import _scrub_get_tokens_positions
+from ..constants import EVIDENCE_CMDI
+from ..constants import VULN_CMDI
+from ._base import VulnerabilityBase
+from ._base import _check_positions_contained
+
+
+if TYPE_CHECKING:
+ from typing import Any # noqa:F401
+ from typing import Dict # noqa:F401
+
+ from ..reporter import IastSpanReporter # noqa:F401
+ from ..reporter import Vulnerability # noqa:F401
+
+
+log = get_logger(__name__)
+
+_INSIDE_QUOTES_REGEXP = re.compile(r"^(?:\s*(?:sudo|doas)\s+)?\b\S+\b\s*(.*)")
+
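+# e.g. _INSIDE_QUOTES_REGEXP.findall("sudo ls -la /tmp") == ["-la /tmp"]:
+# the capture group keeps everything after the (optionally sudo/doas
+# prefixed) command name, i.e. the arguments that may need scrubbing.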
+
+def get_version():
+ # type: () -> str
+ return ""
+
+
+def patch():
+ if not asm_config._iast_enabled:
+ return
+
+ if not getattr(os, "_datadog_cmdi_patch", False):
+ trace_utils.wrap(os, "system", _iast_cmdi_ossystem)
+
+ # all os.spawn* variants eventually use this one:
+ trace_utils.wrap(os, "_spawnvef", _iast_cmdi_osspawn)
+
+ if not getattr(subprocess, "_datadog_cmdi_patch", False):
+ trace_utils.wrap(subprocess, "Popen.__init__", _iast_cmdi_subprocess_init)
+
+ os._datadog_cmdi_patch = True
+ subprocess._datadog_cmdi_patch = True
+
+
+def unpatch():
+ # type: () -> None
+ trace_utils.unwrap(os, "system")
+ trace_utils.unwrap(os, "_spawnvef")
+ trace_utils.unwrap(subprocess.Popen, "__init__")
+
+ os._datadog_cmdi_patch = False # type: ignore[attr-defined]
+ subprocess._datadog_cmdi_patch = False # type: ignore[attr-defined]
+
+
+def _iast_cmdi_ossystem(wrapped, instance, args, kwargs):
+ _iast_report_cmdi(args[0])
+ return wrapped(*args, **kwargs)
+
+
+def _iast_cmdi_osspawn(wrapped, instance, args, kwargs):
+ mode, file, func_args, _, _ = args
+ _iast_report_cmdi(func_args)
+
+ return wrapped(*args, **kwargs)
+
+
+def _iast_cmdi_subprocess_init(wrapped, instance, args, kwargs):
+ cmd_args = args[0] if len(args) else kwargs["args"]
+ _iast_report_cmdi(cmd_args)
+
+ return wrapped(*args, **kwargs)
+
+
+@oce.register
+class CommandInjection(VulnerabilityBase):
+ vulnerability_type = VULN_CMDI
+ evidence_type = EVIDENCE_CMDI
+
+ @classmethod
+ def report(cls, evidence_value=None, sources=None):
+ if isinstance(evidence_value, (str, bytes, bytearray)):
+ from .._taint_tracking import taint_ranges_as_evidence_info
+
+ evidence_value, sources = taint_ranges_as_evidence_info(evidence_value)
+ super(CommandInjection, cls).report(evidence_value=evidence_value, sources=sources)
+
+ @classmethod
+ def _extract_sensitive_tokens(cls, vulns_to_text):
+ # type: (Dict[Vulnerability, str]) -> Dict[int, Dict[str, Any]]
+ ret = {} # type: Dict[int, Dict[str, Any]]
+ for vuln, text in vulns_to_text.items():
+ vuln_hash = hash(vuln)
+ ret[vuln_hash] = {
+ "tokens": set(_INSIDE_QUOTES_REGEXP.findall(text)),
+ }
+ ret[vuln_hash]["token_positions"] = _scrub_get_tokens_positions(text, ret[vuln_hash]["tokens"])
+
+ return ret
+
+ @classmethod
+ def replace_tokens(
+ cls,
+ vuln,
+ vulns_to_tokens,
+ has_range=False,
+ ):
+ ret = vuln.evidence.value
+ replaced = False
+
+ for token in vulns_to_tokens[hash(vuln)]["tokens"]:
+ ret = ret.replace(token, "")
+ replaced = True
+
+ return ret, replaced
+
+ @classmethod
+ def _redact_report(cls, report): # type: (IastSpanReporter) -> IastSpanReporter
+ if not asm_config._iast_redaction_enabled:
+ return report
+
+ # See if there is a match on either any of the sources or value parts of the report
+ found = False
+
+ for source in report.sources:
+ # Join them so we only run the regexps once for each source
+ joined_fields = "%s%s" % (source.name, source.value)
+ if _has_to_scrub(joined_fields):
+ found = True
+ break
+
+ vulns_to_text = {}
+
+ if not found:
+ # Check the evidence's value/s
+ for vuln in report.vulnerabilities:
+ vulnerability_text = cls._get_vulnerability_text(vuln)
+ if _has_to_scrub(vulnerability_text) or _INSIDE_QUOTES_REGEXP.match(vulnerability_text):
+ vulns_to_text[vuln] = vulnerability_text
+ found = True
+ break
+
+ if not found:
+ return report
+
+ if not vulns_to_text:
+ vulns_to_text = {vuln: cls._get_vulnerability_text(vuln) for vuln in report.vulnerabilities}
+
+        # If we're here, some potentially sensitive information was found, so we
+        # delegate to the specific subclass the task of extracting the variable
+        # tokens (e.g. literals inside quotes for SQL Injection). Note that a
+        # single potentially sensitive match means all the tokens must be
+        # scrubbed, which is why this is done in two steps instead of one.
+ vulns_to_tokens = cls._extract_sensitive_tokens(vulns_to_text)
+
+ if not vulns_to_tokens:
+ return report
+
+ all_tokens = set() # type: Set[str]
+ for _, value_dict in vulns_to_tokens.items():
+ all_tokens.update(value_dict["tokens"])
+
+ # Iterate over all the sources, if one of the tokens match it, redact it
+ for source in report.sources:
+ if source.name in "".join(all_tokens) or source.value in "".join(all_tokens):
+ source.pattern = _scrub(source.value, has_range=True)
+ source.redacted = True
+ source.value = None
+
+ # Same for all the evidence values
+ try:
+ for vuln in report.vulnerabilities:
+ # Use the initial hash directly as iteration key since the vuln itself will change
+ vuln_hash = hash(vuln)
+ if vuln.evidence.value is not None:
+ pattern, replaced = cls.replace_tokens(
+ vuln, vulns_to_tokens, hasattr(vuln.evidence.value, "source")
+ )
+ if replaced:
+ vuln.evidence.pattern = pattern
+ vuln.evidence.redacted = True
+ vuln.evidence.value = None
+ elif vuln.evidence.valueParts is not None:
+ idx = 0
+ new_value_parts = []
+ for part in vuln.evidence.valueParts:
+ value = part["value"]
+ part_len = len(value)
+ part_start = idx
+ part_end = idx + part_len
+ pattern_list = []
+
+ for positions in vulns_to_tokens[vuln_hash]["token_positions"]:
+ if _check_positions_contained(positions, (part_start, part_end)):
+ part_scrub_start = max(positions[0] - idx, 0)
+ part_scrub_end = positions[1] - idx
+ pattern_list.append(value[:part_scrub_start] + "" + value[part_scrub_end:])
+ if part.get("source", False) is not False:
+ source = report.sources[part["source"]]
+ if source.redacted:
+ part["redacted"] = source.redacted
+ part["pattern"] = source.pattern
+ del part["value"]
+ new_value_parts.append(part)
+ break
+ else:
+ part["value"] = "".join(pattern_list)
+ new_value_parts.append(part)
+ new_value_parts.append({"redacted": True})
+ break
+ else:
+ new_value_parts.append(part)
+ pattern_list.append(value[part_start:part_end])
+ break
+
+ idx += part_len
+ vuln.evidence.valueParts = new_value_parts
+ except (ValueError, KeyError):
+ log.debug("an error occurred while redacting cmdi", exc_info=True)
+ return report
+
+
+def _iast_report_cmdi(shell_args):
+ # type: (Union[str, List[str]]) -> None
+ report_cmdi = ""
+ from .._metrics import _set_metric_iast_executed_sink
+ from .._taint_tracking import is_pyobject_tainted
+ from .._taint_tracking.aspects import join_aspect
+
+ if isinstance(shell_args, (list, tuple)):
+ for arg in shell_args:
+ if is_pyobject_tainted(arg):
+ report_cmdi = join_aspect(" ".join, 1, " ", shell_args)
+ break
+ elif is_pyobject_tainted(shell_args):
+ report_cmdi = shell_args
+
+ if report_cmdi:
+ increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, CommandInjection.vulnerability_type)
+ _set_metric_iast_executed_sink(CommandInjection.vulnerability_type)
+ CommandInjection.report(evidence_value=report_cmdi)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/insecure_cookie.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/insecure_cookie.py
new file mode 100644
index 0000000..bb81477
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/insecure_cookie.py
@@ -0,0 +1,72 @@
+from typing import TYPE_CHECKING # noqa:F401
+
+from ..._constants import IAST_SPAN_TAGS
+from .. import oce
+from .._metrics import _set_metric_iast_executed_sink
+from .._metrics import increment_iast_span_metric
+from ..constants import EVIDENCE_COOKIE
+from ..constants import VULN_INSECURE_COOKIE
+from ..constants import VULN_NO_HTTPONLY_COOKIE
+from ..constants import VULN_NO_SAMESITE_COOKIE
+from ..taint_sinks._base import VulnerabilityBase
+
+
+if TYPE_CHECKING:
+ from typing import Dict # noqa:F401
+ from typing import Optional # noqa:F401
+
+
+@oce.register
+class InsecureCookie(VulnerabilityBase):
+ vulnerability_type = VULN_INSECURE_COOKIE
+ evidence_type = EVIDENCE_COOKIE
+ scrub_evidence = False
+ skip_location = True
+
+
+@oce.register
+class NoHttpOnlyCookie(VulnerabilityBase):
+ vulnerability_type = VULN_NO_HTTPONLY_COOKIE
+ evidence_type = EVIDENCE_COOKIE
+ skip_location = True
+
+
+@oce.register
+class NoSameSite(VulnerabilityBase):
+ vulnerability_type = VULN_NO_SAMESITE_COOKIE
+ evidence_type = EVIDENCE_COOKIE
+ skip_location = True
+
+
+def asm_check_cookies(cookies): # type: (Optional[Dict[str, str]]) -> None
+ if not cookies:
+ return
+
+ for cookie_key, cookie_value in cookies.items():
+ lvalue = cookie_value.lower().replace(" ", "")
+
+ if ";secure" not in lvalue:
+ increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, InsecureCookie.vulnerability_type)
+ _set_metric_iast_executed_sink(InsecureCookie.vulnerability_type)
+ InsecureCookie.report(evidence_value=cookie_key)
+
+ if ";httponly" not in lvalue:
+ increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, NoHttpOnlyCookie.vulnerability_type)
+ _set_metric_iast_executed_sink(NoHttpOnlyCookie.vulnerability_type)
+ NoHttpOnlyCookie.report(evidence_value=cookie_key)
+
+ if ";samesite=" in lvalue:
+ ss_tokens = lvalue.split(";samesite=")
+ if len(ss_tokens) == 0:
+ report_samesite = True
+ elif ss_tokens[1].startswith("strict") or ss_tokens[1].startswith("lax"):
+ report_samesite = False
+ else:
+ report_samesite = True
+ else:
+ report_samesite = True
+
+ if report_samesite:
+ increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, NoSameSite.vulnerability_type)
+ _set_metric_iast_executed_sink(NoSameSite.vulnerability_type)
+ NoSameSite.report(evidence_value=cookie_key)
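+
+# Illustrative example (not part of the upstream source): the value is
+# lowercased and stripped of spaces before checking, so
+#     asm_check_cookies({"session": "abc; Secure; HttpOnly; SameSite=Lax"})
+# reports nothing, while
+#     asm_check_cookies({"session": "abc"})
+# reports all three vulnerabilities with "session" as evidence.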
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/path_traversal.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/path_traversal.py
new file mode 100644
index 0000000..c761800
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/path_traversal.py
@@ -0,0 +1,70 @@
+from typing import Any
+
+from ddtrace.internal.logger import get_logger
+
+from ..._constants import IAST_SPAN_TAGS
+from .. import oce
+from .._metrics import _set_metric_iast_instrumented_sink
+from .._metrics import increment_iast_span_metric
+from .._patch import set_and_check_module_is_patched
+from .._patch import set_module_unpatched
+from ..constants import EVIDENCE_PATH_TRAVERSAL
+from ..constants import VULN_PATH_TRAVERSAL
+from ..processor import AppSecIastSpanProcessor
+from ._base import VulnerabilityBase
+
+
+log = get_logger(__name__)
+
+
+@oce.register
+class PathTraversal(VulnerabilityBase):
+ vulnerability_type = VULN_PATH_TRAVERSAL
+ evidence_type = EVIDENCE_PATH_TRAVERSAL
+
+ @classmethod
+ def report(cls, evidence_value=None, sources=None):
+ if isinstance(evidence_value, (str, bytes, bytearray)):
+ from .._taint_tracking import taint_ranges_as_evidence_info
+
+ evidence_value, sources = taint_ranges_as_evidence_info(evidence_value)
+ super(PathTraversal, cls).report(evidence_value=evidence_value, sources=sources)
+
+
+def get_version():
+ # type: () -> str
+ return ""
+
+
+def unpatch_iast():
+ # type: () -> None
+ set_module_unpatched("builtins", default_attr="_datadog_path_traversal_patch")
+
+
+def patch():
+ # type: () -> None
+ """Wrap functions which interact with file system."""
+ if not set_and_check_module_is_patched("builtins", default_attr="_datadog_path_traversal_patch"):
+ return
+ _set_metric_iast_instrumented_sink(VULN_PATH_TRAVERSAL)
+
+
+def check_and_report_path_traversal(*args: Any, **kwargs: Any) -> None:
+ if AppSecIastSpanProcessor.is_span_analyzed() and PathTraversal.has_quota():
+ try:
+ from .._metrics import _set_metric_iast_executed_sink
+ from .._taint_tracking import is_pyobject_tainted
+
+ increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, PathTraversal.vulnerability_type)
+ _set_metric_iast_executed_sink(PathTraversal.vulnerability_type)
+ if is_pyobject_tainted(args[0]):
+ PathTraversal.report(evidence_value=args[0])
+ except Exception:
+ log.debug("Unexpected exception while reporting vulnerability", exc_info=True)
+ else:
+ log.debug("IAST: no vulnerability quota to analyze more sink points")
+
+
+def open_path_traversal(*args, **kwargs):
+ check_and_report_path_traversal(*args, **kwargs)
+ return open(*args, **kwargs)
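+
+# Illustrative example (not part of the upstream source): the wrapper keeps
+# open()'s signature, so patched call sites behave identically while the
+# path argument is checked for taint first:
+#
+#     f = open_path_traversal(user_supplied_path)  # reported only if tainted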
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/sql_injection.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/sql_injection.py
new file mode 100644
index 0000000..314bcd6
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/sql_injection.py
@@ -0,0 +1,44 @@
+import re
+from typing import TYPE_CHECKING # noqa:F401
+
+from .. import oce
+from .._taint_tracking import taint_ranges_as_evidence_info
+from .._utils import _scrub_get_tokens_positions
+from ..constants import EVIDENCE_SQL_INJECTION
+from ..constants import VULN_SQL_INJECTION
+from ._base import VulnerabilityBase
+
+
+if TYPE_CHECKING:
+ from typing import Any # noqa:F401
+ from typing import Dict # noqa:F401
+
+    from ..reporter import Vulnerability  # noqa:F401
+
+
+_INSIDE_QUOTES_REGEXP = re.compile(r'["\']([^"\']*?)["\']')
+
+
+@oce.register
+class SqlInjection(VulnerabilityBase):
+ vulnerability_type = VULN_SQL_INJECTION
+ evidence_type = EVIDENCE_SQL_INJECTION
+
+ @classmethod
+ def report(cls, evidence_value=None, sources=None):
+ if isinstance(evidence_value, (str, bytes, bytearray)):
+ evidence_value, sources = taint_ranges_as_evidence_info(evidence_value)
+ super(SqlInjection, cls).report(evidence_value=evidence_value, sources=sources)
+
+ @classmethod
+ def _extract_sensitive_tokens(cls, vulns_to_text):
+ # type: (Dict[Vulnerability, str]) -> Dict[int, Dict[str, Any]]
+ ret = {} # type: Dict[int, Dict[str, Any]]
+ for vuln, text in vulns_to_text.items():
+ vuln_hash = hash(vuln)
+ ret[vuln_hash] = {
+ "tokens": set(_INSIDE_QUOTES_REGEXP.findall(text)),
+ }
+ ret[vuln_hash]["token_positions"] = _scrub_get_tokens_positions(text, ret[vuln_hash]["tokens"])
+
+ return ret
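+
+# Illustrative example (not part of the upstream source): for the query
+#     SELECT * FROM users WHERE name = 'admin'
+# _INSIDE_QUOTES_REGEXP yields the token "admin", and
+# _scrub_get_tokens_positions() locates it so only the quoted literal is
+# redacted from the reported evidence.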
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/ssrf.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/ssrf.py
new file mode 100644
index 0000000..5e8a502
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/ssrf.py
@@ -0,0 +1,175 @@
+import re
+from typing import Callable # noqa:F401
+from typing import Dict # noqa:F401
+from typing import Set # noqa:F401
+
+from ddtrace.internal.logger import get_logger
+from ddtrace.settings.asm import config as asm_config
+
+from ..._constants import IAST_SPAN_TAGS
+from .. import oce
+from .._metrics import increment_iast_span_metric
+from .._taint_tracking import taint_ranges_as_evidence_info
+from .._utils import _has_to_scrub
+from .._utils import _scrub
+from .._utils import _scrub_get_tokens_positions
+from ..constants import EVIDENCE_SSRF
+from ..constants import VULN_SSRF
+from ..constants import VULNERABILITY_TOKEN_TYPE
+from ..processor import AppSecIastSpanProcessor
+from ..reporter import IastSpanReporter # noqa:F401
+from ..reporter import Vulnerability
+from ._base import VulnerabilityBase
+from ._base import _check_positions_contained
+
+
+log = get_logger(__name__)
+
+
+_AUTHORITY_REGEXP = re.compile(r"(?:\/\/([^:@\/]+)(?::([^@\/]+))?@).*")
+_QUERY_FRAGMENT_REGEXP = re.compile(r"[?#&]([^=&;]+)=(?P<QUERY>[^?#&]+)")
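+
+# Illustrative example (not part of the upstream source): for the URL
+#     https://user:pass@host/path?token=secret
+# _AUTHORITY_REGEXP captures ("user", "pass") and _QUERY_FRAGMENT_REGEXP
+# captures ("token", "secret"), so credentials and query values become the
+# tokens to scrub.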
+
+
+@oce.register
+class SSRF(VulnerabilityBase):
+ vulnerability_type = VULN_SSRF
+ evidence_type = EVIDENCE_SSRF
+
+ @classmethod
+ def report(cls, evidence_value=None, sources=None):
+ if isinstance(evidence_value, (str, bytes, bytearray)):
+ evidence_value, sources = taint_ranges_as_evidence_info(evidence_value)
+ super(SSRF, cls).report(evidence_value=evidence_value, sources=sources)
+
+ @classmethod
+ def _extract_sensitive_tokens(cls, vulns_to_text: Dict[Vulnerability, str]) -> VULNERABILITY_TOKEN_TYPE:
+ ret = {} # type: VULNERABILITY_TOKEN_TYPE
+ for vuln, text in vulns_to_text.items():
+ vuln_hash = hash(vuln)
+ authority = []
+ authority_found = _AUTHORITY_REGEXP.findall(text)
+ if authority_found:
+ authority = list(authority_found[0])
+ query = [value for param, value in _QUERY_FRAGMENT_REGEXP.findall(text)]
+ ret[vuln_hash] = {
+ "tokens": set(authority + query),
+ }
+ ret[vuln_hash]["token_positions"] = _scrub_get_tokens_positions(text, ret[vuln_hash]["tokens"])
+
+ return ret
+
+ @classmethod
+ def _redact_report(cls, report): # type: (IastSpanReporter) -> IastSpanReporter
+ if not asm_config._iast_redaction_enabled:
+ return report
+
+        # See if there is a match on any of the sources or value parts of the report
+ found = False
+
+ for source in report.sources:
+ # Join them so we only run the regexps once for each source
+ joined_fields = "%s%s" % (source.name, source.value)
+ if _has_to_scrub(joined_fields):
+ found = True
+ break
+
+ vulns_to_text = {}
+
+ if not found:
+            # Check the evidence's value(s)
+ for vuln in report.vulnerabilities:
+ vulnerability_text = cls._get_vulnerability_text(vuln)
+ if _has_to_scrub(vulnerability_text) or _AUTHORITY_REGEXP.match(vulnerability_text):
+ vulns_to_text[vuln] = vulnerability_text
+ found = True
+ break
+
+ if not found:
+ return report
+
+ if not vulns_to_text:
+ vulns_to_text = {vuln: cls._get_vulnerability_text(vuln) for vuln in report.vulnerabilities}
+
+        # If we're here, some potentially sensitive information was found; we delegate to
+        # the specific subclass the task of extracting the variable tokens (e.g. literals inside
+        # quotes for SQL injection). Note that a single potentially sensitive match means we
+        # must scrub all the tokens, which is why we do it in two steps instead of one
+ vulns_to_tokens = cls._extract_sensitive_tokens(vulns_to_text)
+
+ if not vulns_to_tokens:
+ return report
+
+ all_tokens = set() # type: Set[str]
+ for _, value_dict in vulns_to_tokens.items():
+ all_tokens.update(value_dict["tokens"])
+
+ # Iterate over all the sources, if one of the tokens match it, redact it
+ for source in report.sources:
+ if source.name in "".join(all_tokens) or source.value in "".join(all_tokens):
+ source.pattern = _scrub(source.value, has_range=True)
+ source.redacted = True
+ source.value = None
+
+ # Same for all the evidence values
+ for vuln in report.vulnerabilities:
+ # Use the initial hash directly as iteration key since the vuln itself will change
+ vuln_hash = hash(vuln)
+ if vuln.evidence.value is not None:
+ pattern, replaced = cls.replace_tokens(vuln, vulns_to_tokens, hasattr(vuln.evidence.value, "source"))
+ if replaced:
+ vuln.evidence.pattern = pattern
+ vuln.evidence.redacted = True
+ vuln.evidence.value = None
+ elif vuln.evidence.valueParts is not None:
+ idx = 0
+ new_value_parts = []
+ for part in vuln.evidence.valueParts:
+ value = part["value"]
+ part_len = len(value)
+ part_start = idx
+ part_end = idx + part_len
+ pattern_list = []
+
+ for positions in vulns_to_tokens[vuln_hash]["token_positions"]:
+ if _check_positions_contained(positions, (part_start, part_end)):
+ part_scrub_start = max(positions[0] - idx, 0)
+ part_scrub_end = positions[1] - idx
+ pattern_list.append(value[:part_scrub_start] + "" + value[part_scrub_end:])
+ if part.get("source", False) is not False:
+ source = report.sources[part["source"]]
+ if source.redacted:
+ part["redacted"] = source.redacted
+ part["pattern"] = source.pattern
+ del part["value"]
+ new_value_parts.append(part)
+ break
+ else:
+ part["value"] = "".join(pattern_list)
+ new_value_parts.append(part)
+ new_value_parts.append({"redacted": True})
+ break
+ else:
+ new_value_parts.append(part)
+ pattern_list.append(value[part_start:part_end])
+ break
+
+ idx += part_len
+ vuln.evidence.valueParts = new_value_parts
+ return report
+
+
+def _iast_report_ssrf(func: Callable, *args, **kwargs):
+ from .._metrics import _set_metric_iast_executed_sink
+
+ report_ssrf = kwargs.get("url", False)
+ increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, SSRF.vulnerability_type)
+ _set_metric_iast_executed_sink(SSRF.vulnerability_type)
+ if report_ssrf:
+ if AppSecIastSpanProcessor.is_span_analyzed() and SSRF.has_quota():
+ try:
+ from .._taint_tracking import is_pyobject_tainted
+
+ if is_pyobject_tainted(report_ssrf):
+ SSRF.report(evidence_value=report_ssrf)
+ except Exception:
+ log.debug("Unexpected exception while reporting vulnerability", exc_info=True)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_cipher.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_cipher.py
new file mode 100644
index 0000000..3199528
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_cipher.py
@@ -0,0 +1,166 @@
+import os
+from typing import TYPE_CHECKING # noqa:F401
+
+from ddtrace.internal.logger import get_logger
+
+from ..._constants import IAST_SPAN_TAGS
+from .. import oce
+from .._metrics import _set_metric_iast_executed_sink
+from .._metrics import _set_metric_iast_instrumented_sink
+from .._metrics import increment_iast_span_metric
+from .._patch import set_and_check_module_is_patched
+from .._patch import set_module_unpatched
+from .._patch import try_unwrap
+from .._patch import try_wrap_function_wrapper
+from ..constants import BLOWFISH_DEF
+from ..constants import DEFAULT_WEAK_CIPHER_ALGORITHMS
+from ..constants import DES_DEF
+from ..constants import EVIDENCE_ALGORITHM_TYPE
+from ..constants import RC2_DEF
+from ..constants import RC4_DEF
+from ..constants import VULN_WEAK_CIPHER_TYPE
+from ._base import VulnerabilityBase
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from typing import Any # noqa:F401
+ from typing import Callable # noqa:F401
+ from typing import Set # noqa:F401
+
+log = get_logger(__name__)
+
+
+def get_weak_cipher_algorithms():
+ # type: () -> Set
+ CONFIGURED_WEAK_CIPHER_ALGORITHMS = None
+ DD_IAST_WEAK_CIPHER_ALGORITHMS = os.getenv("DD_IAST_WEAK_CIPHER_ALGORITHMS")
+ if DD_IAST_WEAK_CIPHER_ALGORITHMS:
+ CONFIGURED_WEAK_CIPHER_ALGORITHMS = set(
+ algo.strip() for algo in DD_IAST_WEAK_CIPHER_ALGORITHMS.lower().split(",")
+ )
+ return CONFIGURED_WEAK_CIPHER_ALGORITHMS or DEFAULT_WEAK_CIPHER_ALGORITHMS
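+
+# Illustrative example (not part of the upstream source):
+#     DD_IAST_WEAK_CIPHER_ALGORITHMS="DES, RC4"  ->  {"des", "rc4"}
+#     variable unset or empty                    ->  DEFAULT_WEAK_CIPHER_ALGORITHMS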
+
+
+@oce.register
+class WeakCipher(VulnerabilityBase):
+ vulnerability_type = VULN_WEAK_CIPHER_TYPE
+ evidence_type = EVIDENCE_ALGORITHM_TYPE
+
+
+def unpatch_iast():
+ # type: () -> None
+ set_module_unpatched("Crypto", default_attr="_datadog_weak_cipher_patch")
+ set_module_unpatched("cryptography", default_attr="_datadog_weak_cipher_patch")
+
+ try_unwrap("Crypto.Cipher.DES", "new")
+ try_unwrap("Crypto.Cipher.Blowfish", "new")
+ try_unwrap("Crypto.Cipher.ARC2", "new")
+ try_unwrap("Crypto.Cipher.ARC4", "ARC4Cipher.encrypt")
+ try_unwrap("Crypto.Cipher._mode_cbc", "CbcMode.encrypt")
+ try_unwrap("Crypto.Cipher._mode_cfb", "CfbMode.encrypt")
+ try_unwrap("Crypto.Cipher._mode_ofb", "OfbMode.encrypt")
+ try_unwrap("cryptography.hazmat.primitives.ciphers", "Cipher.encryptor")
+
+
+def get_version():
+ # type: () -> str
+ return ""
+
+
+def patch():
+ # type: () -> None
+ """Wrap hashing functions.
+ Weak hashing algorithms are those that have been proven to be of high risk, or even completely broken,
+ and thus are not fit for use.
+ """
+ if not set_and_check_module_is_patched("Crypto", default_attr="_datadog_weak_cipher_patch"):
+ return
+ if not set_and_check_module_is_patched("cryptography", default_attr="_datadog_weak_cipher_patch"):
+ return
+
+ weak_cipher_algorithms = get_weak_cipher_algorithms()
+ num_instrumented_sinks = 0
+ # pycryptodome methods
+ if DES_DEF in weak_cipher_algorithms:
+ try_wrap_function_wrapper("Crypto.Cipher.DES", "new", wrapped_aux_des_function)
+ num_instrumented_sinks += 1
+ if BLOWFISH_DEF in weak_cipher_algorithms:
+ try_wrap_function_wrapper("Crypto.Cipher.Blowfish", "new", wrapped_aux_blowfish_function)
+ num_instrumented_sinks += 1
+ if RC2_DEF in weak_cipher_algorithms:
+ try_wrap_function_wrapper("Crypto.Cipher.ARC2", "new", wrapped_aux_rc2_function)
+ num_instrumented_sinks += 1
+ if RC4_DEF in weak_cipher_algorithms:
+ try_wrap_function_wrapper("Crypto.Cipher.ARC4", "ARC4Cipher.encrypt", wrapped_rc4_function)
+ num_instrumented_sinks += 1
+
+ if weak_cipher_algorithms:
+ try_wrap_function_wrapper("Crypto.Cipher._mode_cbc", "CbcMode.encrypt", wrapped_function)
+ try_wrap_function_wrapper("Crypto.Cipher._mode_cfb", "CfbMode.encrypt", wrapped_function)
+ try_wrap_function_wrapper("Crypto.Cipher._mode_ecb", "EcbMode.encrypt", wrapped_function)
+ try_wrap_function_wrapper("Crypto.Cipher._mode_ofb", "OfbMode.encrypt", wrapped_function)
+ num_instrumented_sinks += 4
+
+ # cryptography methods
+ try_wrap_function_wrapper(
+ "cryptography.hazmat.primitives.ciphers", "Cipher.encryptor", wrapped_cryptography_function
+ )
+ num_instrumented_sinks += 1
+
+ _set_metric_iast_instrumented_sink(VULN_WEAK_CIPHER_TYPE, num_instrumented_sinks)
+
+
+def wrapped_aux_rc2_function(wrapped, instance, args, kwargs):
+ result = wrapped(*args, **kwargs)
+ result._dd_weakcipher_algorithm = "RC2"
+ return result
+
+
+def wrapped_aux_des_function(wrapped, instance, args, kwargs):
+ result = wrapped(*args, **kwargs)
+ result._dd_weakcipher_algorithm = "DES"
+ return result
+
+
+def wrapped_aux_blowfish_function(wrapped, instance, args, kwargs):
+ result = wrapped(*args, **kwargs)
+ result._dd_weakcipher_algorithm = "Blowfish"
+ return result
+
+
+@WeakCipher.wrap
+def wrapped_rc4_function(wrapped, instance, args, kwargs):
+ # type: (Callable, Any, Any, Any) -> Any
+ increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, WeakCipher.vulnerability_type)
+ _set_metric_iast_executed_sink(WeakCipher.vulnerability_type)
+ WeakCipher.report(
+ evidence_value="RC4",
+ )
+ return wrapped(*args, **kwargs)
+
+
+@WeakCipher.wrap
+def wrapped_function(wrapped, instance, args, kwargs):
+ # type: (Callable, Any, Any, Any) -> Any
+ if hasattr(instance, "_dd_weakcipher_algorithm"):
+ evidence = instance._dd_weakcipher_algorithm + "_" + str(instance.__class__.__name__)
+ increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, WeakCipher.vulnerability_type)
+ _set_metric_iast_executed_sink(WeakCipher.vulnerability_type)
+ WeakCipher.report(
+ evidence_value=evidence,
+ )
+
+ return wrapped(*args, **kwargs)
+
+
+@WeakCipher.wrap
+def wrapped_cryptography_function(wrapped, instance, args, kwargs):
+ # type: (Callable, Any, Any, Any) -> Any
+ algorithm_name = instance.algorithm.name.lower()
+ if algorithm_name in get_weak_cipher_algorithms():
+ increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, WeakCipher.vulnerability_type)
+ _set_metric_iast_executed_sink(WeakCipher.vulnerability_type)
+ WeakCipher.report(
+ evidence_value=algorithm_name,
+ )
+ return wrapped(*args, **kwargs)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_hash.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_hash.py
new file mode 100644
index 0000000..9bebaf8
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_hash.py
@@ -0,0 +1,171 @@
+import os
+import sys
+from typing import TYPE_CHECKING # noqa:F401
+
+from ddtrace.internal.logger import get_logger
+
+from ..._constants import IAST_SPAN_TAGS
+from .. import oce
+from .._metrics import _set_metric_iast_executed_sink
+from .._metrics import _set_metric_iast_instrumented_sink
+from .._metrics import increment_iast_span_metric
+from .._patch import set_and_check_module_is_patched
+from .._patch import set_module_unpatched
+from .._patch import try_unwrap
+from .._patch import try_wrap_function_wrapper
+from ..constants import DEFAULT_WEAK_HASH_ALGORITHMS
+from ..constants import EVIDENCE_ALGORITHM_TYPE
+from ..constants import MD5_DEF
+from ..constants import SHA1_DEF
+from ..constants import VULN_INSECURE_HASHING_TYPE
+from ._base import VulnerabilityBase
+
+
+if TYPE_CHECKING: # pragma: no cover
+ from typing import Any # noqa:F401
+ from typing import Callable # noqa:F401
+ from typing import Set # noqa:F401
+
+log = get_logger(__name__)
+
+
+def get_weak_hash_algorithms():
+ # type: () -> Set
+ CONFIGURED_WEAK_HASH_ALGORITHMS = None
+ DD_IAST_WEAK_HASH_ALGORITHMS = os.getenv("DD_IAST_WEAK_HASH_ALGORITHMS")
+ if DD_IAST_WEAK_HASH_ALGORITHMS:
+ CONFIGURED_WEAK_HASH_ALGORITHMS = set(algo.strip() for algo in DD_IAST_WEAK_HASH_ALGORITHMS.lower().split(","))
+
+ return CONFIGURED_WEAK_HASH_ALGORITHMS or DEFAULT_WEAK_HASH_ALGORITHMS
+
+
+@oce.register
+class WeakHash(VulnerabilityBase):
+ vulnerability_type = VULN_INSECURE_HASHING_TYPE
+ evidence_type = EVIDENCE_ALGORITHM_TYPE
+
+
+def unpatch_iast():
+ # type: () -> None
+ set_module_unpatched("hashlib", default_attr="_datadog_weak_hash_patch")
+ set_module_unpatched("Crypto", default_attr="_datadog_weak_hash_patch")
+
+ if sys.version_info >= (3, 0, 0):
+ try_unwrap("_hashlib", "HASH.digest")
+ try_unwrap("_hashlib", "HASH.hexdigest")
+ try_unwrap(("_%s" % MD5_DEF), "MD5Type.digest")
+ try_unwrap(("_%s" % MD5_DEF), "MD5Type.hexdigest")
+ try_unwrap(("_%s" % SHA1_DEF), "SHA1Type.digest")
+ try_unwrap(("_%s" % SHA1_DEF), "SHA1Type.hexdigest")
+ else:
+ try_unwrap("hashlib", MD5_DEF)
+ try_unwrap("hashlib", SHA1_DEF)
+ try_unwrap("hashlib", "new")
+
+ # pycryptodome methods
+ try_unwrap("Crypto.Hash.MD5", "MD5Hash.digest")
+ try_unwrap("Crypto.Hash.MD5", "MD5Hash.hexdigest")
+ try_unwrap("Crypto.Hash.SHA1", "SHA1Hash.digest")
+ try_unwrap("Crypto.Hash.SHA1", "SHA1Hash.hexdigest")
+
+
+def get_version():
+ # type: () -> str
+ return ""
+
+
+def patch():
+ # type: () -> None
+ """Wrap hashing functions.
+ Weak hashing algorithms are those that have been proven to be of high risk, or even completely broken,
+ and thus are not fit for use.
+ """
+
+ if not set_and_check_module_is_patched("hashlib", default_attr="_datadog_weak_hash_patch"):
+ return
+
+ if not set_and_check_module_is_patched("Crypto", default_attr="_datadog_weak_hash_patch"):
+ return
+
+ weak_hash_algorithms = get_weak_hash_algorithms()
+ num_instrumented_sinks = 0
+ if sys.version_info >= (3, 0, 0):
+ try_wrap_function_wrapper("_hashlib", "HASH.digest", wrapped_digest_function)
+ try_wrap_function_wrapper("_hashlib", "HASH.hexdigest", wrapped_digest_function)
+ num_instrumented_sinks += 2
+ if MD5_DEF in weak_hash_algorithms:
+ try_wrap_function_wrapper(("_%s" % MD5_DEF), "MD5Type.digest", wrapped_md5_function)
+ try_wrap_function_wrapper(("_%s" % MD5_DEF), "MD5Type.hexdigest", wrapped_md5_function)
+ num_instrumented_sinks += 2
+ if SHA1_DEF in weak_hash_algorithms:
+ try_wrap_function_wrapper(("_%s" % SHA1_DEF), "SHA1Type.digest", wrapped_sha1_function)
+ try_wrap_function_wrapper(("_%s" % SHA1_DEF), "SHA1Type.hexdigest", wrapped_sha1_function)
+ num_instrumented_sinks += 2
+ else:
+ if MD5_DEF in weak_hash_algorithms:
+ try_wrap_function_wrapper("hashlib", MD5_DEF, wrapped_md5_function)
+ num_instrumented_sinks += 1
+ if SHA1_DEF in weak_hash_algorithms:
+ try_wrap_function_wrapper("hashlib", SHA1_DEF, wrapped_sha1_function)
+ num_instrumented_sinks += 1
+ try_wrap_function_wrapper("hashlib", "new", wrapped_new_function)
+ num_instrumented_sinks += 1
+
+ # pycryptodome methods
+ if MD5_DEF in weak_hash_algorithms:
+ try_wrap_function_wrapper("Crypto.Hash.MD5", "MD5Hash.digest", wrapped_md5_function)
+ try_wrap_function_wrapper("Crypto.Hash.MD5", "MD5Hash.hexdigest", wrapped_md5_function)
+ num_instrumented_sinks += 2
+ if SHA1_DEF in weak_hash_algorithms:
+ try_wrap_function_wrapper("Crypto.Hash.SHA1", "SHA1Hash.digest", wrapped_sha1_function)
+ try_wrap_function_wrapper("Crypto.Hash.SHA1", "SHA1Hash.hexdigest", wrapped_sha1_function)
+ num_instrumented_sinks += 2
+
+ if num_instrumented_sinks > 0:
+ _set_metric_iast_instrumented_sink(VULN_INSECURE_HASHING_TYPE, num_instrumented_sinks)
+
+
+@WeakHash.wrap
+def wrapped_digest_function(wrapped, instance, args, kwargs):
+ # type: (Callable, Any, Any, Any) -> Any
+ if instance.name.lower() in get_weak_hash_algorithms():
+ increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, WeakHash.vulnerability_type)
+ _set_metric_iast_executed_sink(WeakHash.vulnerability_type)
+ WeakHash.report(
+ evidence_value=instance.name,
+ )
+ return wrapped(*args, **kwargs)
+
+
+@WeakHash.wrap
+def wrapped_md5_function(wrapped, instance, args, kwargs):
+ # type: (Callable, Any, Any, Any) -> Any
+ return wrapped_function(wrapped, MD5_DEF, instance, args, kwargs)
+
+
+@WeakHash.wrap
+def wrapped_sha1_function(wrapped, instance, args, kwargs):
+ # type: (Callable, Any, Any, Any) -> Any
+ return wrapped_function(wrapped, SHA1_DEF, instance, args, kwargs)
+
+
+@WeakHash.wrap
+def wrapped_new_function(wrapped, instance, args, kwargs):
+ # type: (Callable, Any, Any, Any) -> Any
+ if args[0].lower() in get_weak_hash_algorithms():
+ increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, WeakHash.vulnerability_type)
+ _set_metric_iast_executed_sink(WeakHash.vulnerability_type)
+ WeakHash.report(
+ evidence_value=args[0].lower(),
+ )
+ return wrapped(*args, **kwargs)
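+
+# Illustrative note (not part of the upstream source): on the hashlib.new()
+# path, args[0] is the algorithm name, so hashlib.new("md5") is reported
+# while hashlib.new("sha256") is not.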
+
+
+def wrapped_function(wrapped, evidence, instance, args, kwargs):
+ # type: (Callable, str, Any, Any, Any) -> Any
+ increment_iast_span_metric(IAST_SPAN_TAGS.TELEMETRY_EXECUTED_SINK, WeakHash.vulnerability_type)
+ _set_metric_iast_executed_sink(WeakHash.vulnerability_type)
+ WeakHash.report(
+ evidence_value=evidence,
+ )
+ return wrapped(*args, **kwargs)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_randomness.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_randomness.py
new file mode 100644
index 0000000..bd7fc6e
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_iast/taint_sinks/weak_randomness.py
@@ -0,0 +1,14 @@
+from .. import oce
+from ..constants import EVIDENCE_WEAK_RANDOMNESS
+from ..constants import VULN_WEAK_RANDOMNESS
+from ._base import VulnerabilityBase
+
+
+@oce.register
+class WeakRandomness(VulnerabilityBase):
+ vulnerability_type = VULN_WEAK_RANDOMNESS
+ evidence_type = EVIDENCE_WEAK_RANDOMNESS
+
+ @classmethod
+ def report(cls, evidence_value=None, sources=None):
+ super(WeakRandomness, cls).report(evidence_value=evidence_value)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_metrics.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_metrics.py
new file mode 100644
index 0000000..e4a1a20
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_metrics.py
@@ -0,0 +1,124 @@
+from ddtrace.appsec import _asm_request_context
+from ddtrace.appsec._ddwaf import DDWaf_info
+from ddtrace.appsec._ddwaf import version
+from ddtrace.appsec._deduplications import deduplication
+from ddtrace.internal.logger import get_logger
+
+
+log = get_logger(__name__)
+
+
+@deduplication
+def _set_waf_error_metric(msg: str, stack_trace: str, info: DDWaf_info) -> None:
+ # perf - avoid importing telemetry until needed
+ from ddtrace.internal import telemetry
+
+ try:
+ tags = {
+ "waf_version": version(),
+ "lib_language": "python",
+ }
+ if info and info.version:
+ tags["event_rules_version"] = info.version
+ telemetry.telemetry_writer.add_log("ERROR", msg, stack_trace=stack_trace, tags=tags)
+ except Exception:
+ log.warning("Error reporting ASM WAF logs metrics", exc_info=True)
+
+
+def _set_waf_updates_metric(info):
+ # perf - avoid importing telemetry until needed
+ from ddtrace.internal import telemetry
+ from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_APPSEC
+
+ try:
+ if info and info.version:
+ tags = (
+ ("event_rules_version", info.version),
+ ("waf_version", version()),
+ )
+ else:
+ tags = (("waf_version", version()),)
+
+ telemetry.telemetry_writer.add_count_metric(
+ TELEMETRY_NAMESPACE_TAG_APPSEC,
+ "waf.updates",
+ 1.0,
+ tags=tags,
+ )
+ except Exception:
+ log.warning("Error reporting ASM WAF updates metrics", exc_info=True)
+
+
+def _set_waf_init_metric(info):
+ # perf - avoid importing telemetry until needed
+ from ddtrace.internal import telemetry
+ from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_APPSEC
+
+ try:
+ if info and info.version:
+ tags = (
+ ("event_rules_version", info.version),
+ ("waf_version", version()),
+ )
+ else:
+ tags = (
+ (
+ "waf_version",
+ version(),
+ ),
+ )
+
+ telemetry.telemetry_writer.add_count_metric(
+ TELEMETRY_NAMESPACE_TAG_APPSEC,
+ "waf.init",
+ 1.0,
+ tags=tags,
+ )
+ except Exception:
+ log.warning("Error reporting ASM WAF init metrics", exc_info=True)
+
+
+def _set_waf_request_metrics(*args):
+ # perf - avoid importing telemetry until needed
+ from ddtrace.internal import telemetry
+ from ddtrace.internal.telemetry.constants import TELEMETRY_NAMESPACE_TAG_APPSEC
+
+ try:
+ list_results, list_result_info, list_is_blocked = _asm_request_context.get_waf_results() or ([], [], [])
+ if any((list_results, list_result_info, list_is_blocked)):
+ is_blocked = any(list_is_blocked)
+ is_triggered = any((result.data for result in list_results))
+ is_timeout = any((result.timeout for result in list_results))
+ # TODO: enable it when Telemetry intake accepts this tag
+ # is_truncation = any((result.truncation for result in list_results))
+ has_info = any(list_result_info)
+
+ if has_info and list_result_info[0].version:
+ tags_request = (
+ (
+ "event_rules_version",
+ list_result_info[0].version,
+ ),
+ ("waf_version", version()),
+ ("rule_triggered", str(is_triggered).lower()),
+ ("request_blocked", str(is_blocked).lower()),
+ ("waf_timeout", str(is_timeout).lower()),
+ )
+ else:
+ tags_request = (
+ ("waf_version", version()),
+ ("rule_triggered", str(is_triggered).lower()),
+ ("request_blocked", str(is_blocked).lower()),
+ ("waf_timeout", str(is_timeout).lower()),
+ )
+
+ telemetry.telemetry_writer.add_count_metric(
+ TELEMETRY_NAMESPACE_TAG_APPSEC,
+ "waf.requests",
+ 1.0,
+ tags=tags_request,
+ )
+ except Exception:
+ log.warning("Error reporting ASM WAF requests metrics", exc_info=True)
+ finally:
+ _asm_request_context.reset_waf_results()
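+
+# Illustrative note (not part of the upstream source): one "waf.requests"
+# count is emitted per request, tagged with waf_version, rule_triggered,
+# request_blocked and waf_timeout (plus event_rules_version when rule info
+# is available).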
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_processor.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_processor.py
new file mode 100644
index 0000000..c691486
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_processor.py
@@ -0,0 +1,409 @@
+import dataclasses
+import errno
+import json
+from json.decoder import JSONDecodeError
+import os
+import os.path
+import traceback
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Optional
+from typing import Set
+from typing import Tuple
+from typing import Union
+
+from ddtrace.appsec import _asm_request_context
+from ddtrace.appsec._capabilities import _appsec_rc_file_is_not_static
+from ddtrace.appsec._constants import APPSEC
+from ddtrace.appsec._constants import DEFAULT
+from ddtrace.appsec._constants import SPAN_DATA_NAMES
+from ddtrace.appsec._constants import WAF_ACTIONS
+from ddtrace.appsec._constants import WAF_CONTEXT_NAMES
+from ddtrace.appsec._constants import WAF_DATA_NAMES
+from ddtrace.appsec._ddwaf.ddwaf_types import ddwaf_context_capsule
+from ddtrace.appsec._metrics import _set_waf_error_metric
+from ddtrace.appsec._metrics import _set_waf_init_metric
+from ddtrace.appsec._metrics import _set_waf_request_metrics
+from ddtrace.appsec._metrics import _set_waf_updates_metric
+from ddtrace.appsec._trace_utils import _asm_manual_keep
+from ddtrace.constants import ORIGIN_KEY
+from ddtrace.constants import RUNTIME_FAMILY
+from ddtrace.ext import SpanTypes
+from ddtrace.internal import core
+from ddtrace.internal.logger import get_logger
+from ddtrace.internal.processor import SpanProcessor
+from ddtrace.internal.rate_limiter import RateLimiter
+from ddtrace.settings.asm import config as asm_config
+from ddtrace.span import Span
+
+
+log = get_logger(__name__)
+
+
+def _transform_headers(data: Union[Dict[str, str], List[Tuple[str, str]]]) -> Dict[str, Union[str, List[str]]]:
+ normalized: Dict[str, Union[str, List[str]]] = {}
+ headers = data if isinstance(data, list) else data.items()
+ for header, value in headers:
+ header = header.lower()
+ if header in ("cookie", "set-cookie"):
+ continue
+ if header in normalized: # if a header with the same lowercase name already exists, let's make it an array
+ existing = normalized[header]
+ if isinstance(existing, list):
+ existing.append(value)
+ else:
+ normalized[header] = [existing, value]
+ else:
+ normalized[header] = value
+ return normalized
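+
+# Illustrative example (not part of the upstream source): duplicate header
+# names are merged into a list and cookie headers are dropped, e.g.
+#     _transform_headers([("Accept", "a"), ("ACCEPT", "b"), ("Cookie", "c=1")])
+#     -> {"accept": ["a", "b"]}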
+
+
+def get_rules() -> str:
+ return os.getenv("DD_APPSEC_RULES", default=DEFAULT.RULES)
+
+
+def get_appsec_obfuscation_parameter_key_regexp() -> bytes:
+ return os.getenvb(b"DD_APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP", DEFAULT.APPSEC_OBFUSCATION_PARAMETER_KEY_REGEXP)
+
+
+def get_appsec_obfuscation_parameter_value_regexp() -> bytes:
+ return os.getenvb(
+ b"DD_APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP", DEFAULT.APPSEC_OBFUSCATION_PARAMETER_VALUE_REGEXP
+ )
+
+
+_COLLECTED_REQUEST_HEADERS = {
+ "accept",
+ "accept-encoding",
+ "accept-language",
+ "cf-connecting-ip",
+ "cf-connecting-ipv6",
+ "content-encoding",
+ "content-language",
+ "content-length",
+ "content-type",
+ "fastly-client-ip",
+ "forwarded",
+ "forwarded-for",
+ "host",
+ "true-client-ip",
+ "user-agent",
+ "via",
+ "x-client-ip",
+ "x-cluster-client-ip",
+ "x-forwarded",
+ "x-forwarded-for",
+ "x-real-ip",
+}
+
+
+def _set_headers(span: Span, headers: Any, kind: str) -> None:
+ from ddtrace.contrib.trace_utils import _normalize_tag_name
+
+ for k in headers:
+ if isinstance(k, tuple):
+ key, value = k
+ else:
+ key, value = k, headers[k]
+ if key.lower() in _COLLECTED_REQUEST_HEADERS:
+ # since the header value can be a list, use `set_tag()` to ensure it is converted to a string
+ span.set_tag(_normalize_tag_name(kind, key), value)
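+
+# Illustrative example (not part of the upstream source): assuming
+# trace_utils' _normalize_tag_name produces "http.<kind>.headers.<name>",
+#     _set_headers(span, {"User-Agent": "curl/8"}, kind="request")
+# sets the tag http.request.headers.user-agent on the span.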
+
+
+def _get_rate_limiter() -> RateLimiter:
+ return RateLimiter(int(os.getenv("DD_APPSEC_TRACE_RATE_LIMIT", DEFAULT.TRACE_RATE_LIMIT)))
+
+
+@dataclasses.dataclass(eq=False)
+class AppSecSpanProcessor(SpanProcessor):
+ rules: str = dataclasses.field(default_factory=get_rules)
+ obfuscation_parameter_key_regexp: bytes = dataclasses.field(
+ default_factory=get_appsec_obfuscation_parameter_key_regexp
+ )
+ obfuscation_parameter_value_regexp: bytes = dataclasses.field(
+ default_factory=get_appsec_obfuscation_parameter_value_regexp
+ )
+ _addresses_to_keep: Set[str] = dataclasses.field(default_factory=set)
+ _rate_limiter: RateLimiter = dataclasses.field(default_factory=_get_rate_limiter)
+
+ @property
+ def enabled(self):
+ return self._ddwaf is not None
+
+ def __post_init__(self) -> None:
+ from ddtrace.appsec._ddwaf import DDWaf
+
+ try:
+ with open(self.rules, "r") as f:
+ rules = json.load(f)
+ self._update_actions(rules)
+
+ except EnvironmentError as err:
+ if err.errno == errno.ENOENT:
+ log.error("[DDAS-0001-03] ASM could not read the rule file %s. Reason: file does not exist", self.rules)
+ else:
+ # TODO: try to log reasons
+ log.error("[DDAS-0001-03] ASM could not read the rule file %s.", self.rules)
+ raise
+ except JSONDecodeError:
+ log.error("[DDAS-0001-03] ASM could not read the rule file %s. Reason: invalid JSON file", self.rules)
+ raise
+ except Exception:
+ # TODO: try to log reasons
+ log.error("[DDAS-0001-03] ASM could not read the rule file %s.", self.rules)
+ raise
+ try:
+ self._ddwaf = DDWaf(rules, self.obfuscation_parameter_key_regexp, self.obfuscation_parameter_value_regexp)
+ if not self._ddwaf._handle or self._ddwaf.info.failed:
+ stack_trace = "DDWAF.__init__: invalid rules\n ruleset: %s\nloaded:%s\nerrors:%s\n" % (
+ rules,
+ self._ddwaf.info.loaded,
+ self._ddwaf.info.errors,
+ )
+ _set_waf_error_metric("WAF init error. Invalid rules", stack_trace, self._ddwaf.info)
+
+ _set_waf_init_metric(self._ddwaf.info)
+ except ValueError:
+ # Partial of DDAS-0005-00
+ log.warning("[DDAS-0005-00] WAF initialization failed")
+ raise
+ self._update_required()
+
+ def _update_required(self):
+ self._addresses_to_keep.clear()
+ for address in self._ddwaf.required_data:
+ self._addresses_to_keep.add(address)
+ # we always need the request headers
+ self._addresses_to_keep.add(WAF_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES)
+ # we always need the response headers
+ self._addresses_to_keep.add(WAF_DATA_NAMES.RESPONSE_HEADERS_NO_COOKIES)
+
+ def _update_actions(self, rules: Dict[str, Any]) -> None:
+ new_actions = rules.get("actions", [])
+ self._actions: Dict[str, Dict[str, Any]] = WAF_ACTIONS.DEFAULT_ACTIONS
+ for a in new_actions:
+ self._actions[a.get(WAF_ACTIONS.ID, None)] = a
+ if "actions" in rules:
+ del rules["actions"]
+
+ def _update_rules(self, new_rules: Dict[str, Any]) -> bool:
+ result = False
+ if not _appsec_rc_file_is_not_static():
+ return result
+ try:
+ self._update_actions(new_rules)
+ result = self._ddwaf.update_rules(new_rules)
+ _set_waf_updates_metric(self._ddwaf.info)
+ except TypeError:
+ error_msg = "Error updating ASM rules. TypeError exception "
+ log.debug(error_msg, exc_info=True)
+ _set_waf_error_metric(error_msg, traceback.format_exc(), self._ddwaf.info)
+ if not result:
+ error_msg = "Error updating ASM rules. Invalid rules"
+ log.debug(error_msg)
+ _set_waf_error_metric(error_msg, "", self._ddwaf.info)
+ self._update_required()
+ return result
+
+ def on_span_start(self, span: Span) -> None:
+ from ddtrace.contrib import trace_utils
+
+ if span.span_type != SpanTypes.WEB:
+ return
+
+ if _asm_request_context.free_context_available():
+ _asm_request_context.register(span)
+ else:
+ new_asm_context = _asm_request_context.asm_request_context_manager()
+ new_asm_context.__enter__()
+ _asm_request_context.register(span, new_asm_context)
+
+ ctx = self._ddwaf._at_request_start()
+
+ peer_ip = _asm_request_context.get_ip()
+ headers = _asm_request_context.get_headers()
+ headers_case_sensitive = _asm_request_context.get_headers_case_sensitive()
+
+ span.set_metric(APPSEC.ENABLED, 1.0)
+ span.set_tag_str(RUNTIME_FAMILY, "python")
+
+ def waf_callable(custom_data=None):
+ return self._waf_action(span._local_root or span, ctx, custom_data)
+
+ _asm_request_context.set_waf_callback(waf_callable)
+ _asm_request_context.add_context_callback(_set_waf_request_metrics)
+ if headers is not None:
+ _asm_request_context.set_waf_address(SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES, headers, span)
+ _asm_request_context.set_waf_address(
+ SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES_CASE, headers_case_sensitive, span
+ )
+ if not peer_ip:
+ return
+
+ ip = trace_utils._get_request_header_client_ip(headers, peer_ip, headers_case_sensitive)
+ # Save the IP and headers in the context so the retrieval can be skipped later
+ _asm_request_context.set_waf_address(SPAN_DATA_NAMES.REQUEST_HTTP_IP, ip, span)
+ if ip and self._is_needed(WAF_DATA_NAMES.REQUEST_HTTP_IP):
+ log.debug("[DDAS-001-00] Executing ASM WAF for checking IP block")
+ # _asm_request_context.call_callback()
+ _asm_request_context.call_waf_callback({"REQUEST_HTTP_IP": None})
+
+ def _waf_action(
+ self, span: Span, ctx: ddwaf_context_capsule, custom_data: Optional[Dict[str, Any]] = None
+ ) -> Optional[Dict[str, Any]]:
+ """
+ Call the `WAF` with the given parameters. If `custom_data_names` is specified as
+ a list of `(WAF_NAME, WAF_STR)` tuples specifying what values of the `WAF_DATA_NAMES`
+ constant class will be checked. Else, it will check all the possible values
+ from `WAF_DATA_NAMES`.
+
+ If `custom_data_values` is specified, it must be a dictionary where the key is the
+ `WAF_DATA_NAMES` key and the value the custom value. If not used, the values will
+ be retrieved from the `core`. This can be used when you don't want to store
+ the value in the `core` before checking the `WAF`.
+ """
+ if span.span_type != SpanTypes.WEB:
+ return None
+
+ if core.get_item(WAF_CONTEXT_NAMES.BLOCKED, span=span) or core.get_item(WAF_CONTEXT_NAMES.BLOCKED):
+ # We still must run the waf if we need to extract schemas for API SECURITY
+ if not custom_data or not custom_data.get("PROCESSOR_SETTINGS", {}).get("extract-schema", False):
+ return None
+
+ data = {}
+ iter_data = [(key, WAF_DATA_NAMES[key]) for key in custom_data] if custom_data is not None else WAF_DATA_NAMES
+ data_already_sent = _asm_request_context.get_data_sent()
+ if data_already_sent is None:
+ data_already_sent = set()
+
+        # type ignore because mypy does not detect that both branches of the
+        # conditional above are iterable when not None
+ force_keys = custom_data.get("PROCESSOR_SETTINGS", {}).get("extract-schema", False) if custom_data else False
+ for key, waf_name in iter_data: # type: ignore[attr-defined]
+ if key in data_already_sent:
+ continue
+ if self._is_needed(waf_name) or force_keys:
+ value = None
+ if custom_data is not None and custom_data.get(key) is not None:
+ value = custom_data.get(key)
+ elif key in SPAN_DATA_NAMES:
+ value = _asm_request_context.get_value("waf_addresses", SPAN_DATA_NAMES[key])
+ if value is not None:
+ data[waf_name] = _transform_headers(value) if key.endswith("HEADERS_NO_COOKIES") else value
+ data_already_sent.add(key)
+ log.debug("[action] WAF got value %s", SPAN_DATA_NAMES.get(key, key))
+
+ waf_results = self._ddwaf.run(ctx, data, asm_config._waf_timeout)
+ if waf_results and waf_results.data:
+ log.debug("[DDAS-011-00] ASM In-App WAF returned: %s. Timeout %s", waf_results.data, waf_results.timeout)
+
+ for action in waf_results.actions:
+ action_type = self._actions.get(action, {}).get(WAF_ACTIONS.TYPE, None)
+ if action_type == WAF_ACTIONS.BLOCK_ACTION:
+ blocked = self._actions[action][WAF_ACTIONS.PARAMETERS]
+ break
+ elif action_type == WAF_ACTIONS.REDIRECT_ACTION:
+ blocked = self._actions[action][WAF_ACTIONS.PARAMETERS]
+ location = blocked.get("location", "")
+ if not location:
+ blocked = WAF_ACTIONS.DEFAULT_PARAMETERS
+ break
+ status_code = str(blocked.get("status_code", ""))
+ if not (status_code[:3].isdigit() and status_code.startswith("3")):
+ blocked["status_code"] = "303"
+ blocked[WAF_ACTIONS.TYPE] = "none"
+ break
+ else:
+ blocked = {}
+ _asm_request_context.set_waf_results(waf_results, self._ddwaf.info, bool(blocked))
+ if blocked:
+ core.set_item(WAF_CONTEXT_NAMES.BLOCKED, blocked, span=span)
+ core.set_item(WAF_CONTEXT_NAMES.BLOCKED, blocked)
+
+ try:
+ info = self._ddwaf.info
+ if info.errors:
+ errors = json.dumps(info.errors)
+ span.set_tag_str(APPSEC.EVENT_RULE_ERRORS, errors)
+ _set_waf_error_metric("WAF run. Error", errors, info)
+ if waf_results.timeout:
+ _set_waf_error_metric("WAF run. Timeout errors", "", info)
+ span.set_tag_str(APPSEC.EVENT_RULE_VERSION, info.version)
+ from ddtrace.appsec._ddwaf import version
+
+ span.set_tag_str(APPSEC.WAF_VERSION, version())
+
+ def update_metric(name, value):
+ old_value = span.get_metric(name)
+ if old_value is None:
+ old_value = 0.0
+ span.set_metric(name, value + old_value)
+
+ span.set_metric(APPSEC.EVENT_RULE_LOADED, info.loaded)
+ span.set_metric(APPSEC.EVENT_RULE_ERROR_COUNT, info.failed)
+ if waf_results:
+ update_metric(APPSEC.WAF_DURATION, waf_results.runtime)
+ update_metric(APPSEC.WAF_DURATION_EXT, waf_results.total_runtime)
+        except (JSONDecodeError, ValueError):
+            log.warning("Error parsing ASM In-App WAF metrics report data: %s", info.errors)
+        except Exception:
+            log.warning("Error executing ASM In-App WAF metrics report", exc_info=True)
+
+ if (waf_results and waf_results.data) or blocked:
+            # We run the rate limiter only if there is an attack; its goal is to limit
+            # the number of collected ASM events
+ allowed = self._rate_limiter.is_allowed(span.start_ns)
+ if not allowed:
+                # TODO: add metric collection to keep an eye on this (once its name is clarified)
+ return waf_results.derivatives
+
+ for id_tag, kind in [
+ (SPAN_DATA_NAMES.REQUEST_HEADERS_NO_COOKIES, "request"),
+ (SPAN_DATA_NAMES.RESPONSE_HEADERS_NO_COOKIES, "response"),
+ ]:
+ headers_req = _asm_request_context.get_waf_address(id_tag)
+ if headers_req:
+ _set_headers(span, headers_req, kind=kind)
+
+ _asm_request_context.store_waf_results_data(waf_results.data)
+ if blocked:
+ span.set_tag(APPSEC.BLOCKED, "true")
+ _set_waf_request_metrics()
+
+ # Partial DDAS-011-00
+ span.set_tag_str(APPSEC.EVENT, "true")
+
+ remote_ip = _asm_request_context.get_waf_address(SPAN_DATA_NAMES.REQUEST_HTTP_IP)
+ if remote_ip:
+ # Note that if the ip collection is disabled by the env var
+ # DD_TRACE_CLIENT_IP_HEADER_DISABLED actor.ip won't be sent
+ span.set_tag_str("actor.ip", remote_ip)
+
+ # Right now, we overwrite any value that could be already there. We need to reconsider when ASM/AppSec's
+ # specs are updated.
+ _asm_manual_keep(span)
+ if span.get_tag(ORIGIN_KEY) is None:
+ span.set_tag_str(ORIGIN_KEY, APPSEC.ORIGIN_VALUE)
+ return waf_results.derivatives
+
+ def _is_needed(self, address: str) -> bool:
+ return address in self._addresses_to_keep
+
+ def on_span_finish(self, span: Span) -> None:
+ try:
+ if span.span_type == SpanTypes.WEB:
+                # Force setting the response headers at the end
+ headers_req = core.get_item(SPAN_DATA_NAMES.RESPONSE_HEADERS_NO_COOKIES, span=span)
+ if headers_req:
+ _set_headers(span, headers_req, kind="response")
+
+ # this call is only necessary for tests or frameworks that are not using blocking
+ if span.get_tag(APPSEC.JSON) is None and _asm_request_context.in_context():
+ log.debug("metrics waf call")
+ _asm_request_context.call_waf_callback()
+
+ self._ddwaf._at_request_end()
+ finally:
+ # release asm context if it was created by the span
+ _asm_request_context.unregister(span)
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/__init__.py
new file mode 100644
index 0000000..84d0af7
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/__init__.py
@@ -0,0 +1,21 @@
+#!/usr/bin/env python3
+
+from sys import version_info
+
+
+if version_info < (3, 7, 0):
+ from .module_names_py36 import STDLIB_MODULE_NAMES
+elif version_info < (3, 8, 0):
+ from .module_names_py37 import STDLIB_MODULE_NAMES
+elif version_info < (3, 9, 0):
+ from .module_names_py38 import STDLIB_MODULE_NAMES
+elif version_info < (3, 10, 0):
+ from .module_names_py39 import STDLIB_MODULE_NAMES
+elif version_info < (3, 11, 0):
+ from .module_names_py310 import STDLIB_MODULE_NAMES
+else:
+ from .module_names_py311 import STDLIB_MODULE_NAMES
+
+
+def _stdlib_for_python_version(): # type: () -> set
+ return STDLIB_MODULE_NAMES
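+
+# Illustrative example (not part of the upstream source):
+#     >>> "json" in _stdlib_for_python_version()
+#     True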
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py310.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py310.py
new file mode 100644
index 0000000..338f807
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py310.py
@@ -0,0 +1,218 @@
+STDLIB_MODULE_NAMES = {
+ "__future__",
+ "_ast",
+ "_compression",
+ "_thread",
+ "abc",
+ "aifc",
+ "argparse",
+ "array",
+ "ast",
+ "asynchat",
+ "asyncio",
+ "asyncore",
+ "atexit",
+ "audioop",
+ "base64",
+ "bdb",
+ "binascii",
+ "binhex",
+ "bisect",
+ "builtins",
+ "bz2",
+ "cProfile",
+ "calendar",
+ "cgi",
+ "cgitb",
+ "chunk",
+ "cmath",
+ "cmd",
+ "code",
+ "codecs",
+ "codeop",
+ "collections",
+ "colorsys",
+ "compileall",
+ "concurrent",
+ "configparser",
+ "contextlib",
+ "contextvars",
+ "copy",
+ "copyreg",
+ "crypt",
+ "csv",
+ "ctypes",
+ "curses",
+ "dataclasses",
+ "datetime",
+ "dbm",
+ "decimal",
+ "difflib",
+ "dis",
+ "distutils",
+ "doctest",
+ "email",
+ "encodings",
+ "ensurepip",
+ "enum",
+ "errno",
+ "faulthandler",
+ "fcntl",
+ "filecmp",
+ "fileinput",
+ "fnmatch",
+ "fractions",
+ "ftplib",
+ "functools",
+ "gc",
+ "getopt",
+ "getpass",
+ "gettext",
+ "glob",
+ "graphlib",
+ "grp",
+ "gzip",
+ "hashlib",
+ "heapq",
+ "hmac",
+ "html",
+ "http",
+ "idlelib",
+ "imaplib",
+ "imghdr",
+ "imp",
+ "importlib",
+ "inspect",
+ "io",
+ "ipaddress",
+ "itertools",
+ "json",
+ "keyword",
+ "lib2to3",
+ "linecache",
+ "locale",
+ "logging",
+ "lzma",
+ "mailbox",
+ "mailcap",
+ "marshal",
+ "math",
+ "mimetypes",
+ "mmap",
+ "modulefinder",
+ "msilib",
+ "msvcrt",
+ "multiprocessing",
+ "netrc",
+ "nis",
+ "nntplib",
+ "ntpath",
+ "numbers",
+ "opcode",
+ "operator",
+ "optparse",
+ "os",
+ "ossaudiodev",
+ "pathlib",
+ "pdb",
+ "pickle",
+ "pickletools",
+ "pipes",
+ "pkgutil",
+ "platform",
+ "plistlib",
+ "poplib",
+ "posix",
+ "posixpath",
+ "pprint",
+ "profile",
+ "pstats",
+ "pty",
+ "pwd",
+ "py_compile",
+ "pyclbr",
+ "pydoc",
+ "queue",
+ "quopri",
+ "random",
+ "re",
+ "readline",
+ "reprlib",
+ "resource",
+ "rlcompleter",
+ "runpy",
+ "sched",
+ "secrets",
+ "select",
+ "selectors",
+ "shelve",
+ "shlex",
+ "shutil",
+ "signal",
+ "site",
+ "smtpd",
+ "smtplib",
+ "sndhdr",
+ "socket",
+ "socketserver",
+ "spwd",
+ "sqlite3",
+ "sre",
+ "sre_compile",
+ "sre_constants",
+ "sre_parse",
+ "ssl",
+ "stat",
+ "statistics",
+ "string",
+ "stringprep",
+ "struct",
+ "subprocess",
+ "sunau",
+ "symtable",
+ "sys",
+ "sysconfig",
+ "syslog",
+ "tabnanny",
+ "tarfile",
+ "telnetlib",
+ "tempfile",
+ "termios",
+ "test",
+ "textwrap",
+ "threading",
+ "time",
+ "timeit",
+ "tkinter",
+ "token",
+ "tokenize",
+ "trace",
+ "traceback",
+ "tracemalloc",
+ "tty",
+ "turtle",
+ "turtledemo",
+ "types",
+ "typing",
+ "unicodedata",
+ "unittest",
+ "urllib",
+ "uu",
+ "uuid",
+ "venv",
+ "warnings",
+ "wave",
+ "weakref",
+ "webbrowser",
+ "winreg",
+ "winsound",
+ "wsgiref",
+ "xdrlib",
+ "xml",
+ "xmlrpc",
+ "zipapp",
+ "zipfile",
+ "zipimport",
+ "zlib",
+ "zoneinfo",
+}
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py311.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py311.py
new file mode 100644
index 0000000..47030a1
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py311.py
@@ -0,0 +1,218 @@
+STDLIB_MODULE_NAMES = {
+ "__future__",
+ "_ast",
+ "_compression",
+ "_thread",
+ "abc",
+ "aifc",
+ "argparse",
+ "array",
+ "ast",
+ "asynchat",
+ "asyncio",
+ "asyncore",
+ "atexit",
+ "audioop",
+ "base64",
+ "bdb",
+ "binascii",
+ "bisect",
+ "builtins",
+ "bz2",
+ "cProfile",
+ "calendar",
+ "cgi",
+ "cgitb",
+ "chunk",
+ "cmath",
+ "cmd",
+ "code",
+ "codecs",
+ "codeop",
+ "collections",
+ "colorsys",
+ "compileall",
+ "concurrent",
+ "configparser",
+ "contextlib",
+ "contextvars",
+ "copy",
+ "copyreg",
+ "crypt",
+ "csv",
+ "ctypes",
+ "curses",
+ "dataclasses",
+ "datetime",
+ "dbm",
+ "decimal",
+ "difflib",
+ "dis",
+ "distutils",
+ "doctest",
+ "email",
+ "encodings",
+ "ensurepip",
+ "enum",
+ "errno",
+ "faulthandler",
+ "fcntl",
+ "filecmp",
+ "fileinput",
+ "fnmatch",
+ "fractions",
+ "ftplib",
+ "functools",
+ "gc",
+ "getopt",
+ "getpass",
+ "gettext",
+ "glob",
+ "graphlib",
+ "grp",
+ "gzip",
+ "hashlib",
+ "heapq",
+ "hmac",
+ "html",
+ "http",
+ "idlelib",
+ "imaplib",
+ "imghdr",
+ "imp",
+ "importlib",
+ "inspect",
+ "io",
+ "ipaddress",
+ "itertools",
+ "json",
+ "keyword",
+ "lib2to3",
+ "linecache",
+ "locale",
+ "logging",
+ "lzma",
+ "mailbox",
+ "mailcap",
+ "marshal",
+ "math",
+ "mimetypes",
+ "mmap",
+ "modulefinder",
+ "msilib",
+ "msvcrt",
+ "multiprocessing",
+ "netrc",
+ "nis",
+ "nntplib",
+ "ntpath",
+ "numbers",
+ "opcode",
+ "operator",
+ "optparse",
+ "os",
+ "ossaudiodev",
+ "pathlib",
+ "pdb",
+ "pickle",
+ "pickletools",
+ "pipes",
+ "pkgutil",
+ "platform",
+ "plistlib",
+ "poplib",
+ "posix",
+ "posixpath",
+ "pprint",
+ "profile",
+ "pstats",
+ "pty",
+ "pwd",
+ "py_compile",
+ "pyclbr",
+ "pydoc",
+ "queue",
+ "quopri",
+ "random",
+ "re",
+ "readline",
+ "reprlib",
+ "resource",
+ "rlcompleter",
+ "runpy",
+ "sched",
+ "secrets",
+ "select",
+ "selectors",
+ "shelve",
+ "shlex",
+ "shutil",
+ "signal",
+ "site",
+ "smtpd",
+ "smtplib",
+ "sndhdr",
+ "socket",
+ "socketserver",
+ "spwd",
+ "sqlite3",
+ "sre",
+ "sre_compile",
+ "sre_constants",
+ "sre_parse",
+ "ssl",
+ "stat",
+ "statistics",
+ "string",
+ "stringprep",
+ "struct",
+ "subprocess",
+ "sunau",
+ "symtable",
+ "sys",
+ "sysconfig",
+ "syslog",
+ "tabnanny",
+ "tarfile",
+ "telnetlib",
+ "tempfile",
+ "termios",
+ "test",
+ "textwrap",
+ "threading",
+ "time",
+ "timeit",
+ "tkinter",
+ "token",
+ "tokenize",
+ "tomllib",
+ "trace",
+ "traceback",
+ "tracemalloc",
+ "tty",
+ "turtle",
+ "turtledemo",
+ "types",
+ "typing",
+ "unicodedata",
+ "unittest",
+ "urllib",
+ "uu",
+ "uuid",
+ "venv",
+ "warnings",
+ "wave",
+ "weakref",
+ "webbrowser",
+ "winreg",
+ "winsound",
+ "wsgiref",
+ "xdrlib",
+ "xml",
+ "xmlrpc",
+ "zipapp",
+ "zipfile",
+ "zipimport",
+ "zlib",
+ "zoneinfo",
+}
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py36.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py36.py
new file mode 100644
index 0000000..c4eb7c6
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py36.py
@@ -0,0 +1,220 @@
+STDLIB_MODULE_NAMES = {
+ "__future__",
+ "_ast",
+ "_compression",
+ "_dummy_thread",
+ "_thread",
+ "abc",
+ "aifc",
+ "argparse",
+ "array",
+ "ast",
+ "asynchat",
+ "asyncio",
+ "asyncore",
+ "atexit",
+ "audioop",
+ "base64",
+ "bdb",
+ "binascii",
+ "binhex",
+ "bisect",
+ "builtins",
+ "bz2",
+ "cProfile",
+ "calendar",
+ "cgi",
+ "cgitb",
+ "chunk",
+ "cmath",
+ "cmd",
+ "code",
+ "codecs",
+ "codeop",
+ "collections",
+ "colorsys",
+ "compileall",
+ "concurrent",
+ "configparser",
+ "contextlib",
+ "copy",
+ "copyreg",
+ "crypt",
+ "csv",
+ "ctypes",
+ "curses",
+ "datetime",
+ "dbm",
+ "decimal",
+ "difflib",
+ "dis",
+ "distutils",
+ "doctest",
+ "dummy_threading",
+ "email",
+ "encodings",
+ "ensurepip",
+ "enum",
+ "errno",
+ "faulthandler",
+ "fcntl",
+ "filecmp",
+ "fileinput",
+ "fnmatch",
+ "formatter",
+ "fpectl",
+ "fractions",
+ "ftplib",
+ "functools",
+ "gc",
+ "getopt",
+ "getpass",
+ "gettext",
+ "glob",
+ "grp",
+ "gzip",
+ "hashlib",
+ "heapq",
+ "hmac",
+ "html",
+ "http",
+ "imaplib",
+ "imghdr",
+ "imp",
+ "importlib",
+ "inspect",
+ "io",
+ "ipaddress",
+ "itertools",
+ "json",
+ "keyword",
+ "lib2to3",
+ "linecache",
+ "locale",
+ "logging",
+ "lzma",
+ "macpath",
+ "mailbox",
+ "mailcap",
+ "marshal",
+ "math",
+ "mimetypes",
+ "mmap",
+ "modulefinder",
+ "msilib",
+ "msvcrt",
+ "multiprocessing",
+ "netrc",
+ "nis",
+ "nntplib",
+ "ntpath",
+ "numbers",
+ "opcode",
+ "operator",
+ "optparse",
+ "os",
+ "ossaudiodev",
+ "parser",
+ "pathlib",
+ "pdb",
+ "pickle",
+ "pickletools",
+ "pipes",
+ "pkgutil",
+ "platform",
+ "plistlib",
+ "poplib",
+ "posix",
+ "posixpath",
+ "pprint",
+ "profile",
+ "pstats",
+ "pty",
+ "pwd",
+ "py_compile",
+ "pyclbr",
+ "pydoc",
+ "queue",
+ "quopri",
+ "random",
+ "re",
+ "readline",
+ "reprlib",
+ "resource",
+ "rlcompleter",
+ "runpy",
+ "sched",
+ "secrets",
+ "select",
+ "selectors",
+ "shelve",
+ "shlex",
+ "shutil",
+ "signal",
+ "site",
+ "smtpd",
+ "smtplib",
+ "sndhdr",
+ "socket",
+ "socketserver",
+ "spwd",
+ "sqlite3",
+ "sre",
+ "sre_compile",
+ "sre_constants",
+ "sre_parse",
+ "ssl",
+ "stat",
+ "statistics",
+ "string",
+ "stringprep",
+ "struct",
+ "subprocess",
+ "sunau",
+ "symbol",
+ "symtable",
+ "sys",
+ "sysconfig",
+ "syslog",
+ "tabnanny",
+ "tarfile",
+ "telnetlib",
+ "tempfile",
+ "termios",
+ "test",
+ "textwrap",
+ "threading",
+ "time",
+ "timeit",
+ "tkinter",
+ "token",
+ "tokenize",
+ "trace",
+ "traceback",
+ "tracemalloc",
+ "tty",
+ "turtle",
+ "turtledemo",
+ "types",
+ "typing",
+ "unicodedata",
+ "unittest",
+ "urllib",
+ "uu",
+ "uuid",
+ "venv",
+ "warnings",
+ "wave",
+ "weakref",
+ "webbrowser",
+ "winreg",
+ "winsound",
+ "wsgiref",
+ "xdrlib",
+ "xml",
+ "xmlrpc",
+ "zipapp",
+ "zipfile",
+ "zipimport",
+ "zlib",
+}
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py37.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py37.py
new file mode 100644
index 0000000..0f989b2
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py37.py
@@ -0,0 +1,221 @@
+STDLIB_MODULE_NAMES = {
+ "__future__",
+ "_ast",
+ "_compression",
+ "_dummy_thread",
+ "_thread",
+ "abc",
+ "aifc",
+ "argparse",
+ "array",
+ "ast",
+ "asynchat",
+ "asyncio",
+ "asyncore",
+ "atexit",
+ "audioop",
+ "base64",
+ "bdb",
+ "binascii",
+ "binhex",
+ "bisect",
+ "builtins",
+ "bz2",
+ "cProfile",
+ "calendar",
+ "cgi",
+ "cgitb",
+ "chunk",
+ "cmath",
+ "cmd",
+ "code",
+ "codecs",
+ "codeop",
+ "collections",
+ "colorsys",
+ "compileall",
+ "concurrent",
+ "configparser",
+ "contextlib",
+ "contextvars",
+ "copy",
+ "copyreg",
+ "crypt",
+ "csv",
+ "ctypes",
+ "curses",
+ "dataclasses",
+ "datetime",
+ "dbm",
+ "decimal",
+ "difflib",
+ "dis",
+ "distutils",
+ "doctest",
+ "dummy_threading",
+ "email",
+ "encodings",
+ "ensurepip",
+ "enum",
+ "errno",
+ "faulthandler",
+ "fcntl",
+ "filecmp",
+ "fileinput",
+ "fnmatch",
+ "formatter",
+ "fractions",
+ "ftplib",
+ "functools",
+ "gc",
+ "getopt",
+ "getpass",
+ "gettext",
+ "glob",
+ "grp",
+ "gzip",
+ "hashlib",
+ "heapq",
+ "hmac",
+ "html",
+ "http",
+ "imaplib",
+ "imghdr",
+ "imp",
+ "importlib",
+ "inspect",
+ "io",
+ "ipaddress",
+ "itertools",
+ "json",
+ "keyword",
+ "lib2to3",
+ "linecache",
+ "locale",
+ "logging",
+ "lzma",
+ "macpath",
+ "mailbox",
+ "mailcap",
+ "marshal",
+ "math",
+ "mimetypes",
+ "mmap",
+ "modulefinder",
+ "msilib",
+ "msvcrt",
+ "multiprocessing",
+ "netrc",
+ "nis",
+ "nntplib",
+ "ntpath",
+ "numbers",
+ "opcode",
+ "operator",
+ "optparse",
+ "os",
+ "ossaudiodev",
+ "parser",
+ "pathlib",
+ "pdb",
+ "pickle",
+ "pickletools",
+ "pipes",
+ "pkgutil",
+ "platform",
+ "plistlib",
+ "poplib",
+ "posix",
+ "posixpath",
+ "pprint",
+ "profile",
+ "pstats",
+ "pty",
+ "pwd",
+ "py_compile",
+ "pyclbr",
+ "pydoc",
+ "queue",
+ "quopri",
+ "random",
+ "re",
+ "readline",
+ "reprlib",
+ "resource",
+ "rlcompleter",
+ "runpy",
+ "sched",
+ "secrets",
+ "select",
+ "selectors",
+ "shelve",
+ "shlex",
+ "shutil",
+ "signal",
+ "site",
+ "smtpd",
+ "smtplib",
+ "sndhdr",
+ "socket",
+ "socketserver",
+ "spwd",
+ "sqlite3",
+ "sre",
+ "sre_compile",
+ "sre_constants",
+ "sre_parse",
+ "ssl",
+ "stat",
+ "statistics",
+ "string",
+ "stringprep",
+ "struct",
+ "subprocess",
+ "sunau",
+ "symbol",
+ "symtable",
+ "sys",
+ "sysconfig",
+ "syslog",
+ "tabnanny",
+ "tarfile",
+ "telnetlib",
+ "tempfile",
+ "termios",
+ "test",
+ "textwrap",
+ "threading",
+ "time",
+ "timeit",
+ "tkinter",
+ "token",
+ "tokenize",
+ "trace",
+ "traceback",
+ "tracemalloc",
+ "tty",
+ "turtle",
+ "turtledemo",
+ "types",
+ "typing",
+ "unicodedata",
+ "unittest",
+ "urllib",
+ "uu",
+ "uuid",
+ "venv",
+ "warnings",
+ "wave",
+ "weakref",
+ "webbrowser",
+ "winreg",
+ "winsound",
+ "wsgiref",
+ "xdrlib",
+ "xml",
+ "xmlrpc",
+ "zipapp",
+ "zipfile",
+ "zipimport",
+ "zlib",
+}
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py38.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py38.py
new file mode 100644
index 0000000..1e5be0b
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py38.py
@@ -0,0 +1,220 @@
+STDLIB_MODULE_NAMES = {
+ "__future__",
+ "_ast",
+ "_compression",
+ "_dummy_thread",
+ "_thread",
+ "abc",
+ "aifc",
+ "argparse",
+ "array",
+ "ast",
+ "asynchat",
+ "asyncio",
+ "asyncore",
+ "atexit",
+ "audioop",
+ "base64",
+ "bdb",
+ "binascii",
+ "binhex",
+ "bisect",
+ "builtins",
+ "bz2",
+ "cProfile",
+ "calendar",
+ "cgi",
+ "cgitb",
+ "chunk",
+ "cmath",
+ "cmd",
+ "code",
+ "codecs",
+ "codeop",
+ "collections",
+ "colorsys",
+ "compileall",
+ "concurrent",
+ "configparser",
+ "contextlib",
+ "contextvars",
+ "copy",
+ "copyreg",
+ "crypt",
+ "csv",
+ "ctypes",
+ "curses",
+ "dataclasses",
+ "datetime",
+ "dbm",
+ "decimal",
+ "difflib",
+ "dis",
+ "distutils",
+ "doctest",
+ "dummy_threading",
+ "email",
+ "encodings",
+ "ensurepip",
+ "enum",
+ "errno",
+ "faulthandler",
+ "fcntl",
+ "filecmp",
+ "fileinput",
+ "fnmatch",
+ "formatter",
+ "fractions",
+ "ftplib",
+ "functools",
+ "gc",
+ "getopt",
+ "getpass",
+ "gettext",
+ "glob",
+ "grp",
+ "gzip",
+ "hashlib",
+ "heapq",
+ "hmac",
+ "html",
+ "http",
+ "imaplib",
+ "imghdr",
+ "imp",
+ "importlib",
+ "inspect",
+ "io",
+ "ipaddress",
+ "itertools",
+ "json",
+ "keyword",
+ "lib2to3",
+ "linecache",
+ "locale",
+ "logging",
+ "lzma",
+ "mailbox",
+ "mailcap",
+ "marshal",
+ "math",
+ "mimetypes",
+ "mmap",
+ "modulefinder",
+ "msilib",
+ "msvcrt",
+ "multiprocessing",
+ "netrc",
+ "nis",
+ "nntplib",
+ "ntpath",
+ "numbers",
+ "opcode",
+ "operator",
+ "optparse",
+ "os",
+ "ossaudiodev",
+ "parser",
+ "pathlib",
+ "pdb",
+ "pickle",
+ "pickletools",
+ "pipes",
+ "pkgutil",
+ "platform",
+ "plistlib",
+ "poplib",
+ "posix",
+ "posixpath",
+ "pprint",
+ "profile",
+ "pstats",
+ "pty",
+ "pwd",
+ "py_compile",
+ "pyclbr",
+ "pydoc",
+ "queue",
+ "quopri",
+ "random",
+ "re",
+ "readline",
+ "reprlib",
+ "resource",
+ "rlcompleter",
+ "runpy",
+ "sched",
+ "secrets",
+ "select",
+ "selectors",
+ "shelve",
+ "shlex",
+ "shutil",
+ "signal",
+ "site",
+ "smtpd",
+ "smtplib",
+ "sndhdr",
+ "socket",
+ "socketserver",
+ "spwd",
+ "sqlite3",
+ "sre",
+ "sre_compile",
+ "sre_constants",
+ "sre_parse",
+ "ssl",
+ "stat",
+ "statistics",
+ "string",
+ "stringprep",
+ "struct",
+ "subprocess",
+ "sunau",
+ "symbol",
+ "symtable",
+ "sys",
+ "sysconfig",
+ "syslog",
+ "tabnanny",
+ "tarfile",
+ "telnetlib",
+ "tempfile",
+ "termios",
+ "test",
+ "textwrap",
+ "threading",
+ "time",
+ "timeit",
+ "tkinter",
+ "token",
+ "tokenize",
+ "trace",
+ "traceback",
+ "tracemalloc",
+ "tty",
+ "turtle",
+ "turtledemo",
+ "types",
+ "typing",
+ "unicodedata",
+ "unittest",
+ "urllib",
+ "uu",
+ "uuid",
+ "venv",
+ "warnings",
+ "wave",
+ "weakref",
+ "webbrowser",
+ "winreg",
+ "winsound",
+ "wsgiref",
+ "xdrlib",
+ "xml",
+ "xmlrpc",
+ "zipapp",
+ "zipfile",
+ "zipimport",
+ "zlib",
+}
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py39.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py39.py
new file mode 100644
index 0000000..6bdc900
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_python_info/stdlib/module_names_py39.py
@@ -0,0 +1,220 @@
+STDLIB_MODULE_NAMES = {
+ "__future__",
+ "_ast",
+ "_compression",
+ "_thread",
+ "abc",
+ "aifc",
+ "argparse",
+ "array",
+ "ast",
+ "asynchat",
+ "asyncio",
+ "asyncore",
+ "atexit",
+ "audioop",
+ "base64",
+ "bdb",
+ "binascii",
+ "binhex",
+ "bisect",
+ "builtins",
+ "bz2",
+ "cProfile",
+ "calendar",
+ "cgi",
+ "cgitb",
+ "chunk",
+ "cmath",
+ "cmd",
+ "code",
+ "codecs",
+ "codeop",
+ "collections",
+ "colorsys",
+ "compileall",
+ "concurrent",
+ "configparser",
+ "contextlib",
+ "contextvars",
+ "copy",
+ "copyreg",
+ "crypt",
+ "csv",
+ "ctypes",
+ "curses",
+ "dataclasses",
+ "datetime",
+ "dbm",
+ "decimal",
+ "difflib",
+ "dis",
+ "distutils",
+ "doctest",
+ "email",
+ "encodings",
+ "ensurepip",
+ "enum",
+ "errno",
+ "faulthandler",
+ "fcntl",
+ "filecmp",
+ "fileinput",
+ "fnmatch",
+ "formatter",
+ "fractions",
+ "ftplib",
+ "functools",
+ "gc",
+ "getopt",
+ "getpass",
+ "gettext",
+ "glob",
+ "graphlib",
+ "grp",
+ "gzip",
+ "hashlib",
+ "heapq",
+ "hmac",
+ "html",
+ "http",
+ "imaplib",
+ "imghdr",
+ "imp",
+ "importlib",
+ "inspect",
+ "io",
+ "ipaddress",
+ "itertools",
+ "json",
+ "keyword",
+ "lib2to3",
+ "linecache",
+ "locale",
+ "logging",
+ "lzma",
+ "mailbox",
+ "mailcap",
+ "marshal",
+ "math",
+ "mimetypes",
+ "mmap",
+ "modulefinder",
+ "msilib",
+ "msvcrt",
+ "multiprocessing",
+ "netrc",
+ "nis",
+ "nntplib",
+ "ntpath",
+ "numbers",
+ "opcode",
+ "operator",
+ "optparse",
+ "os",
+ "ossaudiodev",
+ "parser",
+ "pathlib",
+ "pdb",
+ "pickle",
+ "pickletools",
+ "pipes",
+ "pkgutil",
+ "platform",
+ "plistlib",
+ "poplib",
+ "posix",
+ "posixpath",
+ "pprint",
+ "profile",
+ "pstats",
+ "pty",
+ "pwd",
+ "py_compile",
+ "pyclbr",
+ "pydoc",
+ "queue",
+ "quopri",
+ "random",
+ "re",
+ "readline",
+ "reprlib",
+ "resource",
+ "rlcompleter",
+ "runpy",
+ "sched",
+ "secrets",
+ "select",
+ "selectors",
+ "shelve",
+ "shlex",
+ "shutil",
+ "signal",
+ "site",
+ "smtpd",
+ "smtplib",
+ "sndhdr",
+ "socket",
+ "socketserver",
+ "spwd",
+ "sqlite3",
+ "sre",
+ "sre_compile",
+ "sre_constants",
+ "sre_parse",
+ "ssl",
+ "stat",
+ "statistics",
+ "string",
+ "stringprep",
+ "struct",
+ "subprocess",
+ "sunau",
+ "symbol",
+ "symtable",
+ "sys",
+ "sysconfig",
+ "syslog",
+ "tabnanny",
+ "tarfile",
+ "telnetlib",
+ "tempfile",
+ "termios",
+ "test",
+ "textwrap",
+ "threading",
+ "time",
+ "timeit",
+ "tkinter",
+ "token",
+ "tokenize",
+ "trace",
+ "traceback",
+ "tracemalloc",
+ "tty",
+ "turtle",
+ "turtledemo",
+ "types",
+ "typing",
+ "unicodedata",
+ "unittest",
+ "urllib",
+ "uu",
+ "uuid",
+ "venv",
+ "warnings",
+ "wave",
+ "weakref",
+ "webbrowser",
+ "winreg",
+ "winsound",
+ "wsgiref",
+ "xdrlib",
+ "xml",
+ "xmlrpc",
+ "zipapp",
+ "zipfile",
+ "zipimport",
+ "zlib",
+ "zoneinfo",
+}
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_remoteconfiguration.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_remoteconfiguration.py
new file mode 100644
index 0000000..a8a7162
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_remoteconfiguration.py
@@ -0,0 +1,248 @@
+# -*- coding: utf-8 -*-
+import json
+import os
+from typing import Any
+from typing import Dict
+from typing import Mapping
+from typing import Optional
+
+from ddtrace import Tracer
+from ddtrace import config
+from ddtrace.appsec._capabilities import _appsec_rc_file_is_not_static
+from ddtrace.appsec._capabilities import _asm_feature_is_required
+from ddtrace.appsec._constants import PRODUCTS
+from ddtrace.appsec._utils import _appsec_rc_features_is_enabled
+from ddtrace.constants import APPSEC_ENV
+from ddtrace.internal import forksafe
+from ddtrace.internal.logger import get_logger
+from ddtrace.internal.remoteconfig._connectors import PublisherSubscriberConnector
+from ddtrace.internal.remoteconfig._publishers import RemoteConfigPublisherMergeDicts
+from ddtrace.internal.remoteconfig._pubsub import PubSub
+from ddtrace.internal.remoteconfig._subscribers import RemoteConfigSubscriber
+from ddtrace.internal.remoteconfig.worker import remoteconfig_poller
+from ddtrace.internal.utils.formats import asbool
+from ddtrace.settings.asm import config as asm_config
+
+
+log = get_logger(__name__)
+
+APPSEC_PRODUCTS = [PRODUCTS.ASM_FEATURES, PRODUCTS.ASM, PRODUCTS.ASM_DATA, PRODUCTS.ASM_DD]
+
+
+class AppSecRC(PubSub):
+ __subscriber_class__ = RemoteConfigSubscriber
+ __publisher_class__ = RemoteConfigPublisherMergeDicts
+ __shared_data__ = PublisherSubscriberConnector()
+
+ def __init__(self, _preprocess_results, callback):
+ self._publisher = self.__publisher_class__(self.__shared_data__, _preprocess_results)
+ self._subscriber = self.__subscriber_class__(self.__shared_data__, callback, "ASM")
+
+
+def _forksafe_appsec_rc():
+ remoteconfig_poller.start_subscribers_by_product(APPSEC_PRODUCTS)
+
+
+def enable_appsec_rc(test_tracer: Optional[Tracer] = None) -> None:
+ """Remote config will be used by ASM libraries to receive four different updates from the backend.
+    Each update has its own product:
+ - ASM_FEATURES product - To allow users enable or disable ASM remotely
+ - ASM product - To allow clients to activate or deactivate rules
+ - ASM_DD product - To allow the library to receive rules updates
+ - ASM_DATA product - To allow the library to receive list of blocked IPs and users
+
+    If the environment variable `DD_APPSEC_ENABLED` is not set, registering ASM_FEATURES allows ASM to be
+    enabled remotely. If it is set to true, the rest of the products are registered as well.
+
+    The `test_tracer` parameter is only needed for testing purposes
+ """
+ # Import tracer here to avoid a circular import
+ if test_tracer is None:
+ from ddtrace import tracer
+ else:
+ tracer = test_tracer
+
+ log.debug("[%s][P: %s] Register ASM Remote Config Callback", os.getpid(), os.getppid())
+ asm_callback = (
+ remoteconfig_poller.get_registered(PRODUCTS.ASM_FEATURES)
+ or remoteconfig_poller.get_registered(PRODUCTS.ASM)
+ or AppSecRC(_preprocess_results_appsec_1click_activation, _appsec_callback)
+ )
+
+ if _asm_feature_is_required():
+ remoteconfig_poller.register(PRODUCTS.ASM_FEATURES, asm_callback)
+
+ if tracer._asm_enabled and _appsec_rc_file_is_not_static():
+ remoteconfig_poller.register(PRODUCTS.ASM_DATA, asm_callback) # IP Blocking
+ remoteconfig_poller.register(PRODUCTS.ASM, asm_callback) # Exclusion Filters & Custom Rules
+ remoteconfig_poller.register(PRODUCTS.ASM_DD, asm_callback) # DD Rules
+
+ forksafe.register(_forksafe_appsec_rc)
+
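+
+# A minimal usage sketch (hypothetical helper, never called by this module), assuming
+# remote configuration is enabled for the tracer:
+def _example_enable_appsec_rc() -> None:
+    # Registers ASM_FEATURES so the backend can toggle ASM remotely; ASM, ASM_DATA and
+    # ASM_DD are registered as well once ASM is enabled and no static rules file is set.
+    enable_appsec_rc()
+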
+
+def disable_appsec_rc():
+ # only used to avoid data leaks between tests
+ for product_name in APPSEC_PRODUCTS:
+ remoteconfig_poller.unregister(product_name)
+
+
+def _add_rules_to_list(features: Mapping[str, Any], feature: str, message: str, ruleset: Dict[str, Any]) -> None:
+ rules = features.get(feature, None)
+ if rules is not None:
+ try:
+ if ruleset.get(feature) is None:
+ ruleset[feature] = []
+ ruleset[feature] += rules
+ log.debug("Reloading Appsec %s: %s", message, str(rules)[:20])
+ except json.JSONDecodeError:
+ log.error("ERROR Appsec %s: invalid JSON content from remote configuration", message)
+
+
+def _appsec_callback(features: Mapping[str, Any], test_tracer: Optional[Tracer] = None) -> None:
+ config = features.get("config", {})
+ _appsec_1click_activation(config, test_tracer)
+ _appsec_api_security_settings(config, test_tracer)
+ _appsec_rules_data(config, test_tracer)
+
+
+def _appsec_rules_data(features: Mapping[str, Any], test_tracer: Optional[Tracer]) -> bool:
+    # Tracer is a parameter for testing purposes
+ # Import tracer here to avoid a circular import
+ if test_tracer is None:
+ from ddtrace import tracer
+ else:
+ tracer = test_tracer
+
+ if features and tracer._appsec_processor:
+ ruleset = {} # type: dict[str, Optional[list[Any]]]
+ _add_rules_to_list(features, "rules_data", "rules data", ruleset)
+ _add_rules_to_list(features, "custom_rules", "custom rules", ruleset)
+ _add_rules_to_list(features, "rules", "Datadog rules", ruleset)
+ _add_rules_to_list(features, "exclusions", "exclusion filters", ruleset)
+ _add_rules_to_list(features, "rules_override", "rules override", ruleset)
+ _add_rules_to_list(features, "scanners", "scanners", ruleset)
+ _add_rules_to_list(features, "processors", "processors", ruleset)
+ if ruleset:
+ return tracer._appsec_processor._update_rules({k: v for k, v in ruleset.items() if v is not None})
+
+ return False
+
+
+def _preprocess_results_appsec_1click_activation(
+ features: Dict[str, Any], pubsub_instance: Optional[PubSub] = None
+) -> Dict[str, Any]:
+    """The main process is responsible for enabling or disabling the ASM products. The child processes
+    don't care about that; they only need to know about the payload content.
+    """
+ if _appsec_rc_features_is_enabled():
+ log.debug(
+ "[%s][P: %s] Receiving ASM Remote Configuration ASM_FEATURES: %s",
+ os.getpid(),
+ os.getppid(),
+ features.get("asm", {}),
+ )
+
+ rc_asm_enabled = None
+ if features is not None:
+ if APPSEC_ENV in os.environ:
+ rc_asm_enabled = asbool(os.environ.get(APPSEC_ENV))
+ elif features == {}:
+ rc_asm_enabled = False
+ else:
+ asm_features = features.get("asm", {})
+ if asm_features is not None:
+ rc_asm_enabled = asm_features.get("enabled")
+ log.debug(
+ "[%s][P: %s] ASM Remote Configuration ASM_FEATURES. Appsec enabled: %s",
+ os.getpid(),
+ os.getppid(),
+ rc_asm_enabled,
+ )
+ if rc_asm_enabled is not None:
+ from ddtrace.appsec._constants import PRODUCTS
+
+ if pubsub_instance is None:
+ pubsub_instance = (
+ remoteconfig_poller.get_registered(PRODUCTS.ASM_FEATURES)
+ or remoteconfig_poller.get_registered(PRODUCTS.ASM)
+ or AppSecRC(_preprocess_results_appsec_1click_activation, _appsec_callback)
+ )
+
+ if rc_asm_enabled and _appsec_rc_file_is_not_static():
+ remoteconfig_poller.register(PRODUCTS.ASM_DATA, pubsub_instance) # IP Blocking
+ remoteconfig_poller.register(PRODUCTS.ASM, pubsub_instance) # Exclusion Filters & Custom Rules
+ remoteconfig_poller.register(PRODUCTS.ASM_DD, pubsub_instance) # DD Rules
+ else:
+ remoteconfig_poller.unregister(PRODUCTS.ASM_DATA)
+ remoteconfig_poller.unregister(PRODUCTS.ASM)
+ remoteconfig_poller.unregister(PRODUCTS.ASM_DD)
+
+ features["asm"] = {"enabled": rc_asm_enabled}
+ return features
+
+
+def _appsec_1click_activation(features: Mapping[str, Any], test_tracer: Optional[Tracer] = None) -> None:
+    """This callback updates the ASM enabled state on the tracer and config instances following this logic:
+ ```
+ | DD_APPSEC_ENABLED | RC Enabled | Result |
+ |-------------------|------------|----------|
+ | | | Disabled |
+ | | false | Disabled |
+ | | true | Enabled |
+ | false | | Disabled |
+ | true | | Enabled |
+ | false | true | Disabled |
+ | true | true | Enabled |
+ ```
+ """
+ if _appsec_rc_features_is_enabled():
+        # Tracer is a parameter for testing purposes
+ # Import tracer here to avoid a circular import
+ if test_tracer is None:
+ from ddtrace import tracer
+ else:
+ tracer = test_tracer
+
+ log.debug("[%s][P: %s] ASM_FEATURES: %s", os.getpid(), os.getppid(), str(features)[:100])
+ if APPSEC_ENV in os.environ:
+            # no one-click activation if the env var is set
+ rc_asm_enabled = asbool(os.environ.get(APPSEC_ENV))
+ elif features is False:
+ rc_asm_enabled = False
+ else:
+ rc_asm_enabled = features.get("asm", {}).get("enabled", False)
+
+ log.debug("APPSEC_ENABLED: %s", rc_asm_enabled)
+ if rc_asm_enabled is not None:
+ log.debug(
+ "[%s][P: %s] Updating ASM Remote Configuration ASM_FEATURES: %s",
+ os.getpid(),
+ os.getppid(),
+ rc_asm_enabled,
+ )
+
+ if rc_asm_enabled:
+ if not tracer._asm_enabled:
+ tracer.configure(appsec_enabled=True)
+ else:
+ asm_config._asm_enabled = True
+ else:
+ if tracer._asm_enabled:
+ tracer.configure(appsec_enabled=False)
+ else:
+ asm_config._asm_enabled = False
+
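+
+# A minimal sketch (hypothetical helper, never called) of the precedence encoded in the
+# table above: when DD_APPSEC_ENABLED (APPSEC_ENV) is set it wins; otherwise the remote
+# "asm.enabled" flag decides. The payload shapes are illustrative only.
+def _example_one_click_payloads() -> None:
+    _appsec_1click_activation({"asm": {"enabled": True}})  # enables ASM unless the env var says false
+    _appsec_1click_activation({})  # no remote flag -> ASM disabled
+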
+
+def _appsec_api_security_settings(features: Mapping[str, Any], test_tracer: Optional[Tracer] = None) -> None:
+ """
+ Update API Security settings from remote config
+ Actually: Update sample rate
+ """
+ if config._remote_config_enabled and asm_config._api_security_enabled:
+ rc_api_security_sample_rate = features.get("api_security", {}).get("request_sample_rate", None)
+ if rc_api_security_sample_rate is not None:
+ try:
+ sample_rate = max(0.0, min(1.0, float(rc_api_security_sample_rate)))
+ asm_config._api_security_sample_rate = sample_rate
+ except BaseException: # nosec
+ pass
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_trace_utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_trace_utils.py
new file mode 100644
index 0000000..a09b8c8
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_trace_utils.py
@@ -0,0 +1,346 @@
+from typing import Optional
+
+from ddtrace import Span
+from ddtrace import Tracer
+from ddtrace import constants
+from ddtrace.appsec import _asm_request_context
+from ddtrace.appsec._constants import APPSEC
+from ddtrace.appsec._constants import LOGIN_EVENTS_MODE
+from ddtrace.appsec._constants import WAF_CONTEXT_NAMES
+from ddtrace.appsec._utils import _safe_userid
+from ddtrace.contrib.trace_utils import set_user
+from ddtrace.ext import SpanTypes
+from ddtrace.ext import user
+from ddtrace.internal import core
+from ddtrace.internal.logger import get_logger
+from ddtrace.settings.asm import config as asm_config
+
+
+log = get_logger(__name__)
+
+
+def _asm_manual_keep(span: Span) -> None:
+ from ddtrace.internal.constants import SAMPLING_DECISION_TRACE_TAG_KEY
+ from ddtrace.internal.sampling import SamplingMechanism
+
+ span.set_tag(constants.MANUAL_KEEP_KEY)
+ # set decision maker to ASM = -5
+ span.set_tag_str(SAMPLING_DECISION_TRACE_TAG_KEY, "-%d" % SamplingMechanism.APPSEC)
+
+
+def _track_user_login_common(
+ tracer: Tracer,
+ success: bool,
+ metadata: Optional[dict] = None,
+ login_events_mode: str = LOGIN_EVENTS_MODE.SDK,
+ login: Optional[str] = None,
+ name: Optional[str] = None,
+ email: Optional[str] = None,
+ span: Optional[Span] = None,
+) -> Optional[Span]:
+ if span is None:
+ span = tracer.current_root_span()
+ if span:
+ success_str = "success" if success else "failure"
+ tag_prefix = "%s.%s" % (APPSEC.USER_LOGIN_EVENT_PREFIX, success_str)
+
+ if success:
+ span.set_tag_str(APPSEC.USER_LOGIN_EVENT_SUCCESS_TRACK, "true")
+ else:
+ span.set_tag_str(APPSEC.USER_LOGIN_EVENT_FAILURE_TRACK, "true")
+
+        # This is used to mark whether the call came from the SDK or from the automatic login events
+ if login_events_mode == LOGIN_EVENTS_MODE.SDK:
+ span.set_tag_str("%s.sdk" % tag_prefix, "true")
+
+ mode_tag = APPSEC.AUTO_LOGIN_EVENTS_SUCCESS_MODE if success else APPSEC.AUTO_LOGIN_EVENTS_FAILURE_MODE
+ auto_tag_mode = (
+ login_events_mode if login_events_mode != LOGIN_EVENTS_MODE.SDK else asm_config._automatic_login_events_mode
+ )
+ span.set_tag_str(mode_tag, auto_tag_mode)
+
+ tag_metadata_prefix = "%s.%s" % (APPSEC.USER_LOGIN_EVENT_PREFIX_PUBLIC, success_str)
+ if metadata is not None:
+ for k, v in metadata.items():
+ span.set_tag_str("%s.%s" % (tag_metadata_prefix, k), str(v))
+
+ if login:
+ span.set_tag_str("%s.login" % tag_prefix, login)
+
+ if email:
+ span.set_tag_str("%s.email" % tag_prefix, email)
+
+ if name:
+ span.set_tag_str("%s.username" % tag_prefix, name)
+
+ _asm_manual_keep(span)
+ return span
+ else:
+ log.warning(
+            "No root span in the current execution. Skipping track_user_login tags. "
+ "See https://docs.datadoghq.com/security_platform/application_security/setup_and_configure/"
+ "?tab=set_user&code-lang=python for more information.",
+ )
+ return None
+
+
+def track_user_login_success_event(
+ tracer: Tracer,
+ user_id: str,
+ metadata: Optional[dict] = None,
+ login: Optional[str] = None,
+ name: Optional[str] = None,
+ email: Optional[str] = None,
+ scope: Optional[str] = None,
+ role: Optional[str] = None,
+ session_id: Optional[str] = None,
+ propagate: bool = False,
+ login_events_mode: str = LOGIN_EVENTS_MODE.SDK,
+ span: Optional[Span] = None,
+) -> None:
+ """
+    Add a new login success tracking event. The parameters after metadata (name, email,
+    scope, role, session_id, propagate) are passed to the `set_user` function, which is called
+    by this one; see:
+ https://docs.datadoghq.com/logs/log_configuration/attributes_naming_convention/#user-related-attributes
+ https://docs.datadoghq.com/security_platform/application_security/setup_and_configure/?tab=set_tag&code-lang=python
+
+ :param tracer: tracer instance to use
+ :param user_id: a string with the UserId
+ :param metadata: a dictionary with additional metadata information to be stored with the event
+ """
+
+ span = _track_user_login_common(tracer, True, metadata, login_events_mode, login, name, email, span)
+ if not span:
+ return
+
+ if (
+ user_id
+ and (login_events_mode not in (LOGIN_EVENTS_MODE.SDK, LOGIN_EVENTS_MODE.EXTENDED))
+ and not asm_config._user_model_login_field
+ ):
+ user_id = _safe_userid(user_id)
+
+ set_user(tracer, user_id, name, email, scope, role, session_id, propagate, span)
+
+
+def track_user_login_failure_event(
+ tracer: Tracer,
+ user_id: Optional[str],
+ exists: bool,
+ metadata: Optional[dict] = None,
+ login_events_mode: str = LOGIN_EVENTS_MODE.SDK,
+) -> None:
+ """
+ Add a new login failure tracking event.
+ :param tracer: tracer instance to use
+ :param user_id: a string with the UserId if exists=True or the username if not
+ :param exists: a boolean indicating if the user exists in the system
+ :param metadata: a dictionary with additional metadata information to be stored with the event
+ """
+
+ if (
+ user_id
+ and (login_events_mode not in (LOGIN_EVENTS_MODE.SDK, LOGIN_EVENTS_MODE.EXTENDED))
+ and not asm_config._user_model_login_field
+ ):
+ user_id = _safe_userid(user_id)
+
+ span = _track_user_login_common(tracer, False, metadata, login_events_mode)
+ if not span:
+ return
+
+ if user_id:
+ span.set_tag_str("%s.failure.%s" % (APPSEC.USER_LOGIN_EVENT_PREFIX_PUBLIC, user.ID), str(user_id))
+ exists_str = "true" if exists else "false"
+ span.set_tag_str("%s.failure.%s" % (APPSEC.USER_LOGIN_EVENT_PREFIX_PUBLIC, user.EXISTS), exists_str)
+
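+
+# A minimal sketch (hypothetical helper, never called) of recording login outcomes via
+# the SDK; `request_user` and `pwd_ok` stand in for application-side values.
+def _example_track_logins(request_user: str, pwd_ok: bool) -> None:
+    from ddtrace import tracer
+
+    if pwd_ok:
+        track_user_login_success_event(tracer, user_id=request_user, metadata={"mfa": "true"})
+    else:
+        # exists=True: the account was found but authentication failed
+        track_user_login_failure_event(tracer, user_id=request_user, exists=True)
+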
+
+def track_user_signup_event(
+ tracer: Tracer, user_id: str, success: bool, login_events_mode: str = LOGIN_EVENTS_MODE.SDK
+) -> None:
+ span = tracer.current_root_span()
+ if span:
+ success_str = "true" if success else "false"
+ span.set_tag_str(APPSEC.USER_SIGNUP_EVENT, success_str)
+ span.set_tag_str(user.ID, user_id)
+ _asm_manual_keep(span)
+
+        # This is used to mark whether the call came from the SDK or from the automatic login events
+ if login_events_mode == LOGIN_EVENTS_MODE.SDK:
+ span.set_tag_str("%s.sdk" % APPSEC.USER_SIGNUP_EVENT, "true")
+ else:
+ span.set_tag_str("%s.auto.mode" % APPSEC.USER_SIGNUP_EVENT, str(login_events_mode))
+
+ return
+ else:
+ log.warning(
+ "No root span in the current execution. Skipping track_user_signup tags. "
+ "See https://docs.datadoghq.com/security_platform/application_security/setup_and_configure/"
+ "?tab=set_user&code-lang=python for more information.",
+ )
+
+
+def track_custom_event(tracer: Tracer, event_name: str, metadata: dict) -> None:
+ """
+ Add a new custom tracking event.
+
+ :param tracer: tracer instance to use
+ :param event_name: the name of the custom event
+ :param metadata: a dictionary with additional metadata information to be stored with the event
+ """
+
+ if not event_name:
+ log.warning("Empty event name given to track_custom_event. Skipping setting tags.")
+ return
+
+ if not metadata:
+ log.warning("Empty metadata given to track_custom_event. Skipping setting tags.")
+ return
+
+ span = tracer.current_root_span()
+ if not span:
+ log.warning(
+ "No root span in the current execution. Skipping track_custom_event tags. "
+ "See https://docs.datadoghq.com/security_platform/application_security"
+ "/setup_and_configure/"
+ "?tab=set_user&code-lang=python for more information.",
+ )
+ return
+
+ span.set_tag_str("%s.%s.track" % (APPSEC.CUSTOM_EVENT_PREFIX, event_name), "true")
+
+ for k, v in metadata.items():
+ if isinstance(v, bool):
+ str_v = "true" if v else "false"
+ else:
+ str_v = str(v)
+ span.set_tag_str("%s.%s.%s" % (APPSEC.CUSTOM_EVENT_PREFIX, event_name, k), str_v)
+ _asm_manual_keep(span)
+
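+
+# A minimal sketch (hypothetical helper, never called) of recording an application-defined
+# security event; boolean metadata values are stringified to "true"/"false" above.
+def _example_track_fraud_check(order_id: str) -> None:
+    from ddtrace import tracer
+
+    track_custom_event(tracer, "payment.fraud_check", {"order_id": order_id, "flagged": True})
+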
+
+def should_block_user(tracer: Tracer, userid: str) -> bool:
+ """
+ Return true if the specified User ID should be blocked.
+
+ :param tracer: tracer instance to use
+ :param userid: the ID of the user as registered by `set_user`
+ """
+
+ if not asm_config._asm_enabled:
+ log.warning(
+ "One click blocking of user ids is disabled. To use this feature please enable "
+ "Application Security Monitoring"
+ )
+ return False
+
+ # Early check to avoid calling the WAF if the request is already blocked
+ span = tracer.current_root_span()
+ if not span:
+ log.warning(
+            "No root span in the current execution. should_block_user returning False. "
+ "See https://docs.datadoghq.com/security_platform/application_security"
+ "/setup_and_configure/"
+ "?tab=set_user&code-lang=python for more information.",
+ )
+ return False
+
+ if core.get_item(WAF_CONTEXT_NAMES.BLOCKED, span=span):
+ return True
+
+ _asm_request_context.call_waf_callback(custom_data={"REQUEST_USER_ID": str(userid)})
+ return bool(core.get_item(WAF_CONTEXT_NAMES.BLOCKED, span=span))
+
+
+def block_request() -> None:
+ """
+    Block the current request and return a 403 Forbidden response. If the response
+    has already started being sent, this may not work. The behaviour of this function
+    may differ between frameworks, but it usually involves raising some kind of internal exception,
+    meaning that if you catch the exception the request may not be blocked.
+ """
+ if not asm_config._asm_enabled:
+        log.warning("block_request() is disabled. To use this feature please enable Application Security Monitoring")
+ return
+
+ _asm_request_context.block_request()
+
+
+def block_request_if_user_blocked(tracer: Tracer, userid: str) -> None:
+ """
+ Check if the specified User ID should be blocked and if positive
+ block the current request using `block_request`.
+
+ :param tracer: tracer instance to use
+ :param userid: the ID of the user as registered by `set_user`
+ """
+ if not asm_config._asm_enabled:
+        log.warning("block_request_if_user_blocked call requires ASM to be enabled")
+ return
+
+ if should_block_user(tracer, userid):
+ span = tracer.current_root_span()
+ if span:
+ span.set_tag_str(user.ID, str(userid))
+ _asm_request_context.block_request()
+
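+
+# A minimal sketch (hypothetical helper, never called) of the per-request gating flow:
+# should_block_user() consults the WAF, block_request() raises a framework-specific
+# exception, and block_request_if_user_blocked() combines both steps.
+def _example_gate_request(current_user_id: str) -> None:
+    from ddtrace import tracer
+
+    # Check, then block explicitly...
+    if should_block_user(tracer, current_user_id):
+        block_request()
+    # ...or let the helper perform both steps in one call:
+    block_request_if_user_blocked(tracer, current_user_id)
+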
+
+def _on_django_login(
+ pin,
+ request,
+ user,
+ mode,
+ info_retriever,
+):
+ if not asm_config._asm_enabled:
+ return
+
+ if user:
+ if str(user) != "AnonymousUser":
+ user_id, user_extra = info_retriever.get_user_info()
+
+ with pin.tracer.trace("django.contrib.auth.login", span_type=SpanTypes.AUTH):
+ from ddtrace.contrib.django.compat import user_is_authenticated
+
+ if user_is_authenticated(user):
+ session_key = getattr(request, "session_key", None)
+ track_user_login_success_event(
+ pin.tracer,
+ user_id=user_id,
+ session_id=session_key,
+ propagate=True,
+ login_events_mode=mode,
+ **user_extra,
+ )
+ else:
+ # Login failed but the user exists
+ track_user_login_failure_event(pin.tracer, user_id=user_id, exists=True, login_events_mode=mode)
+ else:
+ # Login failed and the user is unknown
+ user_id = info_retriever.get_userid()
+ track_user_login_failure_event(pin.tracer, user_id=user_id, exists=False, login_events_mode=mode)
+
+
+def _on_django_auth(result_user, mode, kwargs, pin, info_retriever):
+ if not asm_config._asm_enabled:
+ return True, result_user
+
+ extended_userid_fields = info_retriever.possible_user_id_fields + info_retriever.possible_login_fields
+ userid_list = info_retriever.possible_user_id_fields if mode == "safe" else extended_userid_fields
+
+ for possible_key in userid_list:
+ if possible_key in kwargs:
+ user_id = kwargs[possible_key]
+ break
+ else:
+ user_id = None
+
+ if not result_user:
+ with pin.tracer.trace("django.contrib.auth.login", span_type=SpanTypes.AUTH):
+ track_user_login_failure_event(pin.tracer, user_id=user_id, exists=False, login_events_mode=mode)
+
+ return False, None
+
+
+core.on("django.login", _on_django_login)
+core.on("django.auth", _on_django_auth, "user")
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_utils.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_utils.py
new file mode 100644
index 0000000..8efd4cf
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/_utils.py
@@ -0,0 +1,176 @@
+import os
+import uuid
+
+from ddtrace.appsec._constants import API_SECURITY
+from ddtrace.constants import APPSEC_ENV
+from ddtrace.internal.compat import to_unicode
+from ddtrace.internal.logger import get_logger
+from ddtrace.internal.utils.http import _get_blocked_template # noqa:F401
+from ddtrace.internal.utils.http import parse_form_multipart # noqa:F401
+from ddtrace.internal.utils.http import parse_form_params # noqa:F401
+from ddtrace.settings import _config as config
+from ddtrace.settings.asm import config as asm_config
+
+
+log = get_logger(__name__)
+
+
+def parse_response_body(raw_body):
+ import json
+
+ import xmltodict
+
+ from ddtrace.appsec import _asm_request_context
+ from ddtrace.appsec._constants import SPAN_DATA_NAMES
+ from ddtrace.contrib.trace_utils import _get_header_value_case_insensitive
+
+ if not raw_body:
+ return
+
+ if isinstance(raw_body, dict):
+ return raw_body
+
+ headers = _asm_request_context.get_waf_address(SPAN_DATA_NAMES.RESPONSE_HEADERS_NO_COOKIES)
+ if not headers:
+ return
+ content_type = _get_header_value_case_insensitive(
+ {to_unicode(k): to_unicode(v) for k, v in dict(headers).items()},
+ "content-type",
+ )
+ if not content_type:
+ return
+
+ def access_body(bd):
+ if isinstance(bd, list) and isinstance(bd[0], (str, bytes)):
+ bd = bd[0][:0].join(bd)
+ if getattr(bd, "decode", False):
+ bd = bd.decode("UTF-8", errors="ignore")
+ if len(bd) >= API_SECURITY.MAX_PAYLOAD_SIZE:
+ raise ValueError("response body larger than 16MB")
+ return bd
+
+ req_body = None
+ try:
+ # TODO handle charset
+ if "json" in content_type:
+ req_body = json.loads(access_body(raw_body))
+ elif "xml" in content_type:
+ req_body = xmltodict.parse(access_body(raw_body))
+ else:
+ return
+ except BaseException:
+ log.debug("Failed to parse response body", exc_info=True)
+ else:
+ return req_body
+
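+
+# A minimal sketch (hypothetical helper, never called): parse_response_body() dispatches
+# on the response content-type stored in the ASM request context, so it only returns a
+# parsed structure when that context holds a json/xml content-type and the body is under
+# API_SECURITY.MAX_PAYLOAD_SIZE.
+def _example_parse_json_body() -> None:
+    parsed = parse_response_body(b'{"ok": true}')  # None unless headers are in the context
+    log.debug("parsed response body: %s", parsed)
+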
+
+def _appsec_rc_features_is_enabled() -> bool:
+ if config._remote_config_enabled:
+ return APPSEC_ENV not in os.environ
+ return False
+
+
+def _appsec_apisec_features_is_active() -> bool:
+ return asm_config._asm_enabled and asm_config._api_security_enabled and asm_config._api_security_sample_rate > 0.0
+
+
+def _safe_userid(user_id):
+ try:
+ _ = int(user_id)
+ return user_id
+ except ValueError:
+ try:
+ _ = uuid.UUID(user_id)
+ return user_id
+ except ValueError:
+ pass
+
+ return None
+
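+
+# A minimal sketch (hypothetical helper, never called): _safe_userid() keeps ids that
+# look numeric or UUID-shaped and returns None otherwise; the values are illustrative.
+def _example_safe_userid() -> None:
+    assert _safe_userid("42") == "42"
+    assert _safe_userid("123e4567-e89b-12d3-a456-426614174000") is not None
+    assert _safe_userid("alice@example.com") is None
+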
+
+class _UserInfoRetriever:
+ def __init__(self, user):
+ self.user = user
+ self.possible_user_id_fields = ["pk", "id", "uid", "userid", "user_id", "PK", "ID", "UID", "USERID"]
+ self.possible_login_fields = ["username", "user", "login", "USERNAME", "USER", "LOGIN"]
+ self.possible_email_fields = ["email", "mail", "address", "EMAIL", "MAIL", "ADDRESS"]
+ self.possible_name_fields = [
+ "name",
+ "fullname",
+ "full_name",
+ "first_name",
+ "NAME",
+ "FULLNAME",
+ "FULL_NAME",
+ "FIRST_NAME",
+ ]
+
+ def find_in_user_model(self, possible_fields):
+ for field in possible_fields:
+ value = getattr(self.user, field, None)
+ if value:
+ return value
+
+        return None  # explicit, to make clear that returning None is meaningful
+
+ def get_userid(self):
+ user_login = getattr(self.user, asm_config._user_model_login_field, None)
+ if user_login:
+ return user_login
+
+ user_login = self.find_in_user_model(self.possible_user_id_fields)
+ if config._automatic_login_events_mode == "extended":
+ return user_login
+
+ return _safe_userid(user_login)
+
+ def get_username(self):
+ username = getattr(self.user, asm_config._user_model_name_field, None)
+ if username:
+ return username
+
+ if hasattr(self.user, "get_username"):
+ try:
+ return self.user.get_username()
+ except Exception:
+ log.debug("User model get_username member produced an exception: ", exc_info=True)
+
+ return self.find_in_user_model(self.possible_login_fields)
+
+ def get_user_email(self):
+ email = getattr(self.user, asm_config._user_model_email_field, None)
+ if email:
+ return email
+
+ return self.find_in_user_model(self.possible_email_fields)
+
+ def get_name(self):
+ name = getattr(self.user, asm_config._user_model_name_field, None)
+ if name:
+ return name
+
+ return self.find_in_user_model(self.possible_name_fields)
+
+ def get_user_info(self):
+ """
+ In safe mode, try to get the user id from the user object.
+ In extended mode, try to also get the username (which will be the returned user_id),
+ email and name.
+ """
+ user_extra_info = {}
+
+ user_id = self.get_userid()
+ if asm_config._automatic_login_events_mode == "extended":
+ if not user_id:
+ user_id = self.find_in_user_model(self.possible_user_id_fields)
+
+ user_extra_info = {
+ "login": self.get_username(),
+ "email": self.get_user_email(),
+ "name": self.get_name(),
+ }
+
+ if not user_id:
+ return None, {}
+
+ return user_id, user_extra_info
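+
+
+# A minimal sketch (hypothetical helper and user object, never called) of what
+# get_user_info() yields: in "safe" mode only an id-like value, in "extended" mode the
+# login, email and name fields as well.
+def _example_user_info() -> None:
+    class _User:  # stand-in for a Django-style user model
+        pk = 7
+        username = "alice"
+        email = "alice@example.com"
+        first_name = "Alice"
+
+    user_id, extra = _UserInfoRetriever(_User()).get_user_info()
+    log.debug("user_id=%s extra=%s", user_id, extra)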
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/iast/__init__.py b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/iast/__init__.py
new file mode 100644
index 0000000..d109e02
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/iast/__init__.py
@@ -0,0 +1 @@
+from ddtrace.appsec._iast import ddtrace_iast_flask_patch # noqa: F401
diff --git a/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/rules.json b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/rules.json
new file mode 100644
index 0000000..d572c00
--- /dev/null
+++ b/lambdas/aws-dd-forwarder-3.127.0/ddtrace/appsec/rules.json
@@ -0,0 +1,9320 @@
+{
+ "version": "2.2",
+ "metadata": {
+ "rules_version": "1.10.0"
+ },
+ "rules": [
+ {
+ "id": "blk-001-001",
+ "name": "Block IP Addresses",
+ "tags": {
+ "type": "block_ip",
+ "category": "security_response"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "http.client_ip"
+ }
+ ],
+ "data": "blocked_ips"
+ },
+ "operator": "ip_match"
+ }
+ ],
+ "transformers": [],
+ "on_match": [
+ "block"
+ ]
+ },
+ {
+ "id": "blk-001-002",
+ "name": "Block User Addresses",
+ "tags": {
+ "type": "block_user",
+ "category": "security_response"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "usr.id"
+ }
+ ],
+ "data": "blocked_users"
+ },
+ "operator": "exact_match"
+ }
+ ],
+ "transformers": [],
+ "on_match": [
+ "block"
+ ]
+ },
+ {
+ "id": "crs-913-110",
+ "name": "Acunetix",
+ "tags": {
+ "type": "commercial_scanner",
+ "crs_id": "913110",
+ "category": "attack_attempt",
+ "tool_name": "Acunetix",
+ "cwe": "200",
+ "capec": "1000/118/169",
+ "confidence": "0"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.headers.no_cookies"
+ }
+ ],
+ "list": [
+ "acunetix-product",
+ "(acunetix web vulnerability scanner",
+ "acunetix-scanning-agreement",
+ "acunetix-user-agreement",
+ "md5(acunetix_wvs_security_test)"
+ ]
+ },
+ "operator": "phrase_match"
+ }
+ ],
+ "transformers": [
+ "lowercase"
+ ]
+ },
+ {
+ "id": "crs-913-120",
+ "name": "Known security scanner filename/argument",
+ "tags": {
+ "type": "security_scanner",
+ "crs_id": "913120",
+ "category": "attack_attempt",
+ "cwe": "200",
+ "capec": "1000/118/169",
+ "confidence": "1"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.body"
+ },
+ {
+ "address": "server.request.path_params"
+ },
+ {
+ "address": "grpc.server.request.message"
+ },
+ {
+ "address": "graphql.server.all_resolvers"
+ },
+ {
+ "address": "graphql.server.resolver"
+ }
+ ],
+ "list": [
+ "/.adsensepostnottherenonobook",
+ "/hello.html",
+ "/actsensepostnottherenonotive",
+ "/acunetix-wvs-test-for-some-inexistent-file",
+ "/antidisestablishmentarianism",
+ "/appscan_fingerprint/mac_address",
+ "/arachni-",
+ "/cybercop",
+ "/nessus_is_probing_you_",
+ "/nessustest",
+ "/netsparker-",
+ "/rfiinc.txt",
+ "/thereisnowaythat-you-canbethere",
+ "/w3af/remotefileinclude.html",
+ "appscan_fingerprint",
+ "w00tw00t.at.isc.sans.dfind",
+ "w00tw00t.at.blackhats.romanian.anti-sec"
+ ]
+ },
+ "operator": "phrase_match"
+ }
+ ],
+ "transformers": [
+ "lowercase"
+ ]
+ },
+ {
+ "id": "crs-920-260",
+ "name": "Unicode Full/Half Width Abuse Attack Attempt",
+ "tags": {
+ "type": "http_protocol_violation",
+ "crs_id": "920260",
+ "category": "attack_attempt",
+ "cwe": "176",
+ "capec": "1000/255/153/267/71",
+ "confidence": "0"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.uri.raw"
+ }
+ ],
+ "regex": "\\%u[fF]{2}[0-9a-fA-F]{2}",
+ "options": {
+ "case_sensitive": true,
+ "min_length": 6
+ }
+ },
+ "operator": "match_regex"
+ }
+ ],
+ "transformers": []
+ },
+ {
+ "id": "crs-921-110",
+ "name": "HTTP Request Smuggling Attack",
+ "tags": {
+ "type": "http_protocol_violation",
+ "crs_id": "921110",
+ "category": "attack_attempt",
+ "cwe": "444",
+ "capec": "1000/210/272/220/33"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.body"
+ },
+ {
+ "address": "server.request.path_params"
+ }
+ ],
+ "regex": "(?:get|post|head|options|connect|put|delete|trace|track|patch|propfind|propatch|mkcol|copy|move|lock|unlock)\\s+[^\\s]+\\s+http/\\d",
+ "options": {
+ "case_sensitive": true,
+ "min_length": 12
+ }
+ },
+ "operator": "match_regex"
+ }
+ ],
+ "transformers": [
+ "lowercase"
+ ]
+ },
+ {
+ "id": "crs-921-160",
+ "name": "HTTP Header Injection Attack via payload (CR/LF and header-name detected)",
+ "tags": {
+ "type": "http_protocol_violation",
+ "crs_id": "921160",
+ "category": "attack_attempt",
+ "cwe": "113",
+ "capec": "1000/210/272/220/105"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.path_params"
+ }
+ ],
+ "regex": "[\\n\\r]+(?:refresh|(?:set-)?cookie|(?:x-)?(?:forwarded-(?:for|host|server)|via|remote-ip|remote-addr|originating-IP))\\s*:",
+ "options": {
+ "case_sensitive": true,
+ "min_length": 3
+ }
+ },
+ "operator": "match_regex"
+ }
+ ],
+ "transformers": [
+ "lowercase"
+ ]
+ },
+ {
+ "id": "crs-930-100",
+ "name": "Obfuscated Path Traversal Attack (/../)",
+ "tags": {
+ "type": "lfi",
+ "crs_id": "930100",
+ "category": "attack_attempt",
+ "cwe": "22",
+ "capec": "1000/255/153/126",
+ "confidence": "1"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.uri.raw"
+ },
+ {
+ "address": "server.request.headers.no_cookies"
+ }
+ ],
+ "regex": "(?:%(?:c(?:0%(?:[2aq]f|5c|9v)|1%(?:[19p]c|8s|af))|2(?:5(?:c(?:0%25af|1%259c)|2f|5c)|%46|f)|(?:(?:f(?:8%8)?0%8|e)0%80%a|bg%q)f|%3(?:2(?:%(?:%6|4)6|F)|5%%63)|u(?:221[56]|002f|EFC8|F025)|1u|5c)|0x(?:2f|5c)|\\/|\\x5c)(?:%(?:(?:f(?:(?:c%80|8)%8)?0%8|e)0%80%ae|2(?:(?:5(?:c0%25a|2))?e|%45)|u(?:(?:002|ff0)e|2024)|%32(?:%(?:%6|4)5|E)|c0(?:%[256aef]e|\\.))|\\.(?:%0[01])?|0x2e){2,3}(?:%(?:c(?:0%(?:[2aq]f|5c|9v)|1%(?:[19p]c|8s|af))|2(?:5(?:c(?:0%25af|1%259c)|2f|5c)|%46|f)|(?:(?:f(?:8%8)?0%8|e)0%80%a|bg%q)f|%3(?:2(?:%(?:%6|4)6|F)|5%%63)|u(?:221[56]|002f|EFC8|F025)|1u|5c)|0x(?:2f|5c)|\\/|\\x5c)",
+ "options": {
+ "min_length": 4
+ }
+ },
+ "operator": "match_regex"
+ }
+ ],
+ "transformers": [
+ "normalizePath"
+ ]
+ },
+ {
+ "id": "crs-930-110",
+ "name": "Simple Path Traversal Attack (/../)",
+ "tags": {
+ "type": "lfi",
+ "crs_id": "930110",
+ "category": "attack_attempt",
+ "cwe": "22",
+ "capec": "1000/255/153/126",
+ "confidence": "1"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.uri.raw"
+ },
+ {
+ "address": "server.request.headers.no_cookies"
+ }
+ ],
+ "regex": "(?:(?:^|[\\x5c/])\\.{2,3}[\\x5c/]|[\\x5c/]\\.{2,3}(?:[\\x5c/]|$))",
+ "options": {
+ "case_sensitive": true,
+ "min_length": 3
+ }
+ },
+ "operator": "match_regex"
+ }
+ ],
+ "transformers": [
+ "removeNulls"
+ ]
+ },
+ {
+ "id": "crs-930-120",
+ "name": "OS File Access Attempt",
+ "tags": {
+ "type": "lfi",
+ "crs_id": "930120",
+ "category": "attack_attempt",
+ "cwe": "22",
+ "capec": "1000/255/153/126",
+ "confidence": "1"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.body"
+ },
+ {
+ "address": "server.request.path_params"
+ },
+ {
+ "address": "grpc.server.request.message"
+ },
+ {
+ "address": "graphql.server.all_resolvers"
+ },
+ {
+ "address": "graphql.server.resolver"
+ }
+ ],
+ "list": [
+ "/.htaccess",
+ "/.htdigest",
+ "/.htpasswd",
+ "/.addressbook",
+ "/.aptitude/config",
+ ".aws/config",
+ ".aws/credentials",
+ "/.bash_config",
+ "/.bash_history",
+ "/.bash_logout",
+ "/.bash_profile",
+ "/.bashrc",
+ ".cache/notify-osd.log",
+ ".config/odesk/odesk team.conf",
+ "/.cshrc",
+ "/.dockerignore",
+ ".drush/",
+ "/.eslintignore",
+ "/.fbcindex",
+ "/.forward",
+ "/.git",
+ ".git/",
+ "/.gitattributes",
+ "/.gitconfig",
+ ".gnupg/",
+ ".hplip/hplip.conf",
+ "/.ksh_history",
+ "/.lesshst",
+ ".lftp/",
+ "/.lhistory",
+ "/.lldb-history",
+ ".local/share/mc/",
+ "/.lynx_cookies",
+ "/.my.cnf",
+ "/.mysql_history",
+ "/.nano_history",
+ "/.node_repl_history",
+ "/.pearrc",
+ "/.pgpass",
+ "/.php_history",
+ "/.pinerc",
+ ".pki/",
+ "/.proclog",
+ "/.procmailrc",
+ "/.psql_history",
+ "/.python_history",
+ "/.rediscli_history",
+ "/.rhistory",
+ "/.rhosts",
+ "/.sh_history",
+ "/.sqlite_history",
+ ".ssh/authorized_keys",
+ ".ssh/config",
+ ".ssh/id_dsa",
+ ".ssh/id_dsa.pub",
+ ".ssh/id_rsa",
+ ".ssh/id_rsa.pub",
+ ".ssh/identity",
+ ".ssh/identity.pub",
+ ".ssh/id_ecdsa",
+ ".ssh/id_ecdsa.pub",
+ ".ssh/known_hosts",
+ ".subversion/auth",
+ ".subversion/config",
+ ".subversion/servers",
+ ".tconn/tconn.conf",
+ "/.tcshrc",
+ ".vidalia/vidalia.conf",
+ "/.viminfo",
+ "/.vimrc",
+ "/.www_acl",
+ "/.wwwacl",
+ "/.xauthority",
+ "/.zhistory",
+ "/.zshrc",
+ "/.zsh_history",
+ "/.nsconfig",
+ "data/elasticsearch",
+ "data/kafka",
+ "etc/ansible",
+ "etc/bind",
+ "etc/centos-release",
+ "etc/centos-release-upstream",
+ "etc/clam.d",
+ "etc/elasticsearch",
+ "etc/freshclam.conf",
+ "etc/gshadow",
+ "etc/gshadow-",
+ "etc/httpd",
+ "etc/kafka",
+ "etc/kibana",
+ "etc/logstash",
+ "etc/lvm",
+ "etc/mongod.conf",
+ "etc/my.cnf",
+ "etc/nuxeo.conf",
+ "etc/pki",
+ "etc/postfix",
+ "etc/scw-release",
+ "etc/subgid",
+ "etc/subgid-",
+ "etc/sudoers.d",
+ "etc/sysconfig",
+ "etc/system-release-cpe",
+ "opt/nuxeo",
+ "opt/tomcat",
+ "tmp/kafka-logs",
+ "usr/lib/rpm/rpm.log",
+ "var/data/elasticsearch",
+ "var/lib/elasticsearch",
+ "etc/.java",
+ "etc/acpi",
+ "etc/alsa",
+ "etc/alternatives",
+ "etc/apache2",
+ "etc/apm",
+ "etc/apparmor",
+ "etc/apparmor.d",
+ "etc/apport",
+ "etc/apt",
+ "etc/asciidoc",
+ "etc/avahi",
+ "etc/bash_completion.d",
+ "etc/binfmt.d",
+ "etc/bluetooth",
+ "etc/bonobo-activation",
+ "etc/brltty",
+ "etc/ca-certificates",
+ "etc/calendar",
+ "etc/chatscripts",
+ "etc/chromium-browser",
+ "etc/clamav",
+ "etc/cni",
+ "etc/console-setup",
+ "etc/coraza-waf",
+ "etc/cracklib",
+ "etc/cron.d",
+ "etc/cron.daily",
+ "etc/cron.hourly",
+ "etc/cron.monthly",
+ "etc/cron.weekly",
+ "etc/cups",
+ "etc/cups.save",
+ "etc/cupshelpers",
+ "etc/dbus-1",
+ "etc/dconf",
+ "etc/default",
+ "etc/depmod.d",
+ "etc/dhcp",
+ "etc/dictionaries-common",
+ "etc/dkms",
+ "etc/dnsmasq.d",
+            "etc/docker",
+            "etc/dpkg",
+ "etc/emacs",
+ "etc/environment.d",
+ "etc/fail2ban",
+ "etc/firebird",
+ "etc/firefox",
+ "etc/fonts",
+ "etc/fwupd",
+ "etc/gconf",
+ "etc/gdb",
+ "etc/gdm3",
+ "etc/geoclue",
+ "etc/ghostscript",
+ "etc/gimp",
+ "etc/glvnd",
+ "etc/gnome",
+ "etc/gnome-vfs-2.0",
+ "etc/gnucash",
+ "etc/gnustep",
+ "etc/groff",
+ "etc/grub.d",
+ "etc/gss",
+ "etc/gtk-2.0",
+ "etc/gtk-3.0",
+ "etc/hp",
+ "etc/ifplugd",
+ "etc/imagemagick-6",
+ "etc/init",
+ "etc/init.d",
+ "etc/initramfs-tools",
+ "etc/insserv.conf.d",
+ "etc/iproute2",
+ "etc/iptables",
+ "etc/java",
+ "etc/java-11-openjdk",
+ "etc/java-17-oracle",
+ "etc/java-8-openjdk",
+ "etc/kernel",
+ "etc/ld.so.conf.d",
+ "etc/ldap",
+ "etc/libblockdev",
+ "etc/libibverbs.d",
+ "etc/libnl-3",
+ "etc/libpaper.d",
+ "etc/libreoffice",
+ "etc/lighttpd",
+ "etc/logcheck",
+ "etc/logrotate.d",
+ "etc/lynx",
+ "etc/mail",
+ "etc/mc",
+ "etc/menu",
+ "etc/menu-methods",
+ "etc/modprobe.d",
+ "etc/modsecurity",
+ "etc/modules-load.d",
+ "etc/monit",
+ "etc/mono",
+ "etc/mplayer",
+ "etc/mpv",
+ "etc/muttrc.d",
+ "etc/mysql",
+ "etc/netplan",
+ "etc/network",
+ "etc/networkd-dispatcher",
+ "etc/networkmanager",
+ "etc/newt",
+ "etc/nghttpx",
+ "etc/nikto",
+ "etc/odbcdatasources",
+ "etc/openal",
+ "etc/openmpi",
+ "etc/opt",
+ "etc/osync",
+ "etc/packagekit",
+ "etc/pam.d",
+ "etc/pcmcia",
+ "etc/perl",
+ "etc/php",
+ "etc/pki",
+ "etc/pm",
+ "etc/polkit-1",
+ "etc/postfix",
+ "etc/ppp",
+ "etc/profile.d",
+ "etc/proftpd",
+ "etc/pulse",
+ "etc/python",
+ "etc/rc0.d",
+ "etc/rc1.d",
+ "etc/rc2.d",
+ "etc/rc3.d",
+ "etc/rc4.d",
+ "etc/rc5.d",
+ "etc/rc6.d",
+ "etc/rcs.d",
+ "etc/resolvconf",
+ "etc/rsyslog.d",
+ "etc/samba",
+ "etc/sane.d",
+ "etc/security",
+ "etc/selinux",
+ "etc/sensors.d",
+ "etc/sgml",
+ "etc/signon-ui",
+ "etc/skel",
+ "etc/snmp",
+ "etc/sound",
+ "etc/spamassassin",
+ "etc/speech-dispatcher",
+ "etc/ssh",
+ "etc/ssl",
+ "etc/sudoers.d",
+ "etc/sysctl.d",
+ "etc/sysstat",
+ "etc/systemd",
+ "etc/terminfo",
+ "etc/texmf",
+ "etc/thermald",
+ "etc/thnuclnt",
+ "etc/thunderbird",
+ "etc/timidity",
+ "etc/tmpfiles.d",
+ "etc/ubuntu-advantage",
+ "etc/udev",
+ "etc/udisks2",
+ "etc/ufw",
+ "etc/update-manager",
+ "etc/update-motd.d",
+ "etc/update-notifier",
+ "etc/upower",
+ "etc/urlview",
+ "etc/usb_modeswitch.d",
+ "etc/vim",
+ "etc/vmware",
+ "etc/vmware-installer",
+ "etc/vmware-vix",
+ "etc/vulkan",
+ "etc/w3m",
+ "etc/wireshark",
+ "etc/wpa_supplicant",
+ "etc/x11",
+ "etc/xdg",
+ "etc/xml",
+ "etc/redis.conf",
+ "etc/redis-sentinel.conf",
+ "etc/php.ini",
+ "bin/php.ini",
+ "etc/httpd/php.ini",
+ "usr/lib/php.ini",
+ "usr/lib/php/php.ini",
+ "usr/local/etc/php.ini",
+ "usr/local/lib/php.ini",
+ "usr/local/php/lib/php.ini",
+ "usr/local/php4/lib/php.ini",
+ "usr/local/php5/lib/php.ini",
+ "usr/local/apache/conf/php.ini",
+ "etc/php4.4/fcgi/php.ini",
+ "etc/php4/apache/php.ini",
+ "etc/php4/apache2/php.ini",
+ "etc/php5/apache/php.ini",
+ "etc/php5/apache2/php.ini",
+ "etc/php/php.ini",
+ "etc/php/php4/php.ini",
+ "etc/php/apache/php.ini",
+ "etc/php/apache2/php.ini",
+ "web/conf/php.ini",
+ "usr/local/zend/etc/php.ini",
+ "opt/xampp/etc/php.ini",
+ "var/local/www/conf/php.ini",
+ "etc/php/cgi/php.ini",
+ "etc/php4/cgi/php.ini",
+ "etc/php5/cgi/php.ini",
+ "home2/bin/stable/apache/php.ini",
+ "home/bin/stable/apache/php.ini",
+ "etc/httpd/conf.d/php.conf",
+ "php5/php.ini",
+ "php4/php.ini",
+ "php/php.ini",
+ "windows/php.ini",
+ "winnt/php.ini",
+ "apache/php/php.ini",
+ "xampp/apache/bin/php.ini",
+ "netserver/bin/stable/apache/php.ini",
+ "volumes/macintosh_hd1/usr/local/php/lib/php.ini",
+ "etc/mono/1.0/machine.config",
+ "etc/mono/2.0/machine.config",
+ "etc/mono/2.0/web.config",
+ "etc/mono/config",
+ "usr/local/cpanel/logs/stats_log",
+ "usr/local/cpanel/logs/access_log",
+ "usr/local/cpanel/logs/error_log",
+ "usr/local/cpanel/logs/license_log",
+ "usr/local/cpanel/logs/login_log",
+ "var/cpanel/cpanel.config",
+ "usr/local/psa/admin/logs/httpsd_access_log",
+ "usr/local/psa/admin/logs/panel.log",
+ "usr/local/psa/admin/conf/php.ini",
+ "etc/sw-cp-server/applications.d/plesk.conf",
+ "usr/local/psa/admin/conf/site_isolation_settings.ini",
+ "usr/local/sb/config",
+ "etc/sw-cp-server/applications.d/00-sso-cpserver.conf",
+ "etc/sso/sso_config.ini",
+ "etc/mysql/conf.d/old_passwords.cnf",
+ "var/mysql.log",
+ "var/mysql-bin.index",
+ "var/data/mysql-bin.index",
+ "program files/mysql/mysql server 5.0/data/{host}.err",
+ "program files/mysql/mysql server 5.0/data/mysql.log",
+ "program files/mysql/mysql server 5.0/data/mysql.err",
+ "program files/mysql/mysql server 5.0/data/mysql-bin.log",
+ "program files/mysql/mysql server 5.0/data/mysql-bin.index",
+ "program files/mysql/data/{host}.err",
+ "program files/mysql/data/mysql.log",
+ "program files/mysql/data/mysql.err",
+ "program files/mysql/data/mysql-bin.log",
+ "program files/mysql/data/mysql-bin.index",
+ "mysql/data/{host}.err",
+ "mysql/data/mysql.log",
+ "mysql/data/mysql.err",
+ "mysql/data/mysql-bin.log",
+ "mysql/data/mysql-bin.index",
+ "usr/local/mysql/data/mysql.log",
+ "usr/local/mysql/data/mysql.err",
+ "usr/local/mysql/data/mysql-bin.log",
+ "usr/local/mysql/data/mysql-slow.log",
+ "usr/local/mysql/data/mysqlderror.log",
+ "usr/local/mysql/data/{host}.err",
+ "usr/local/mysql/data/mysql-bin.index",
+ "var/lib/mysql/my.cnf",
+ "etc/mysql/my.cnf",
+ "etc/my.cnf",
+ "program files/mysql/mysql server 5.0/my.ini",
+ "program files/mysql/mysql server 5.0/my.cnf",
+ "program files/mysql/my.ini",
+ "program files/mysql/my.cnf",
+ "mysql/my.ini",
+ "mysql/my.cnf",
+ "mysql/bin/my.ini",
+ "var/postgresql/log/postgresql.log",
+ "usr/internet/pgsql/data/postmaster.log",
+ "usr/local/pgsql/data/postgresql.log",
+ "usr/local/pgsql/data/pg_log",
+ "postgresql/log/pgadmin.log",
+ "var/lib/pgsql/data/postgresql.conf",
+ "var/postgresql/db/postgresql.conf",
+ "var/nm2/postgresql.conf",
+ "usr/local/pgsql/data/postgresql.conf",
+ "usr/local/pgsql/data/pg_hba.conf",
+ "usr/internet/pgsql/data/pg_hba.conf",
+ "usr/local/pgsql/data/passwd",
+ "usr/local/pgsql/bin/pg_passwd",
+ "etc/postgresql/postgresql.conf",
+ "etc/postgresql/pg_hba.conf",
+ "home/postgres/data/postgresql.conf",
+ "home/postgres/data/pg_version",
+ "home/postgres/data/pg_ident.conf",
+ "home/postgres/data/pg_hba.conf",
+ "program files/postgresql/8.3/data/pg_hba.conf",
+ "program files/postgresql/8.3/data/pg_ident.conf",
+ "program files/postgresql/8.3/data/postgresql.conf",
+ "program files/postgresql/8.4/data/pg_hba.conf",
+ "program files/postgresql/8.4/data/pg_ident.conf",
+ "program files/postgresql/8.4/data/postgresql.conf",
+ "program files/postgresql/9.0/data/pg_hba.conf",
+ "program files/postgresql/9.0/data/pg_ident.conf",
+ "program files/postgresql/9.0/data/postgresql.conf",
+ "program files/postgresql/9.1/data/pg_hba.conf",
+ "program files/postgresql/9.1/data/pg_ident.conf",
+ "program files/postgresql/9.1/data/postgresql.conf",
+ "wamp/logs/access.log",
+ "wamp/logs/apache_error.log",
+ "wamp/logs/genquery.log",
+ "wamp/logs/mysql.log",
+ "wamp/logs/slowquery.log",
+ "wamp/bin/apache/apache2.2.22/logs/access.log",
+ "wamp/bin/apache/apache2.2.22/logs/error.log",
+ "wamp/bin/apache/apache2.2.21/logs/access.log",
+ "wamp/bin/apache/apache2.2.21/logs/error.log",
+ "wamp/bin/mysql/mysql5.5.24/data/mysql-bin.index",
+ "wamp/bin/mysql/mysql5.5.16/data/mysql-bin.index",
+ "wamp/bin/apache/apache2.2.21/conf/httpd.conf",
+ "wamp/bin/apache/apache2.2.22/conf/httpd.conf",
+ "wamp/bin/apache/apache2.2.21/wampserver.conf",
+ "wamp/bin/apache/apache2.2.22/wampserver.conf",
+ "wamp/bin/apache/apache2.2.22/conf/wampserver.conf",
+ "wamp/bin/mysql/mysql5.5.24/my.ini",
+ "wamp/bin/mysql/mysql5.5.24/wampserver.conf",
+ "wamp/bin/mysql/mysql5.5.16/my.ini",
+ "wamp/bin/mysql/mysql5.5.16/wampserver.conf",
+ "wamp/bin/php/php5.3.8/php.ini",
+ "wamp/bin/php/php5.4.3/php.ini",
+ "xampp/apache/logs/access.log",
+ "xampp/apache/logs/error.log",
+ "xampp/mysql/data/mysql-bin.index",
+ "xampp/mysql/data/mysql.err",
+ "xampp/mysql/data/{host}.err",
+ "xampp/sendmail/sendmail.log",
+ "xampp/apache/conf/httpd.conf",
+ "xampp/filezillaftp/filezilla server.xml",
+ "xampp/mercurymail/mercury.ini",
+ "xampp/php/php.ini",
+ "xampp/phpmyadmin/config.inc.php",
+ "xampp/sendmail/sendmail.ini",
+ "xampp/webalizer/webalizer.conf",
+ "opt/lampp/etc/httpd.conf",
+ "xampp/htdocs/aca.txt",
+ "xampp/htdocs/admin.php",
+ "xampp/htdocs/leer.txt",
+ "usr/local/apache/logs/audit_log",
+ "usr/local/apache2/logs/audit_log",
+ "logs/security_debug_log",
+ "logs/security_log",
+ "usr/local/apache/conf/modsec.conf",
+ "usr/local/apache2/conf/modsec.conf",
+ "winnt/system32/logfiles/msftpsvc",
+ "winnt/system32/logfiles/msftpsvc1",
+ "winnt/system32/logfiles/msftpsvc2",
+ "windows/system32/logfiles/msftpsvc",
+ "windows/system32/logfiles/msftpsvc1",
+ "windows/system32/logfiles/msftpsvc2",
+ "etc/logrotate.d/proftpd",
+ "www/logs/proftpd.system.log",
+ "etc/pam.d/proftpd",
+ "etc/proftp.conf",
+ "etc/protpd/proftpd.conf",
+ "etc/vhcs2/proftpd/proftpd.conf",
+ "etc/proftpd/modules.conf",
+ "etc/vsftpd.chroot_list",
+ "etc/logrotate.d/vsftpd.log",
+ "etc/vsftpd/vsftpd.conf",
+ "etc/vsftpd.conf",
+ "etc/chrootusers",
+ "var/adm/log/xferlog",
+ "etc/wu-ftpd/ftpaccess",
+ "etc/wu-ftpd/ftphosts",
+ "etc/wu-ftpd/ftpusers",
+ "logs/pure-ftpd.log",
+ "usr/sbin/pure-config.pl",
+ "usr/etc/pure-ftpd.conf",
+ "etc/pure-ftpd/pure-ftpd.conf",
+ "usr/local/etc/pure-ftpd.conf",
+ "usr/local/etc/pureftpd.pdb",
+ "usr/local/pureftpd/etc/pureftpd.pdb",
+ "usr/local/pureftpd/sbin/pure-config.pl",
+ "usr/local/pureftpd/etc/pure-ftpd.conf",
+ "etc/pure-ftpd.conf",
+ "etc/pure-ftpd/pure-ftpd.pdb",
+ "etc/pureftpd.pdb",
+ "etc/pureftpd.passwd",
+ "etc/pure-ftpd/pureftpd.pdb",
+ "usr/ports/ftp/pure-ftpd/pure-ftpd.conf",
+ "usr/ports/ftp/pure-ftpd/pureftpd.pdb",
+ "usr/ports/ftp/pure-ftpd/pureftpd.passwd",
+ "usr/ports/net/pure-ftpd/pure-ftpd.conf",
+ "usr/ports/net/pure-ftpd/pureftpd.pdb",
+ "usr/ports/net/pure-ftpd/pureftpd.passwd",
+ "usr/pkgsrc/net/pureftpd/pure-ftpd.conf",
+ "usr/pkgsrc/net/pureftpd/pureftpd.pdb",
+ "usr/pkgsrc/net/pureftpd/pureftpd.passwd",
+ "usr/ports/contrib/pure-ftpd/pure-ftpd.conf",
+ "usr/ports/contrib/pure-ftpd/pureftpd.pdb",
+ "usr/ports/contrib/pure-ftpd/pureftpd.passwd",
+ "usr/sbin/mudlogd",
+ "etc/muddleftpd/mudlog",
+ "etc/muddleftpd.com",
+ "etc/muddleftpd/mudlogd.conf",
+ "etc/muddleftpd/muddleftpd.conf",
+ "usr/sbin/mudpasswd",
+ "etc/muddleftpd/muddleftpd.passwd",
+ "etc/muddleftpd/passwd",
+ "etc/logrotate.d/ftp",
+ "etc/ftpchroot",
+ "etc/ftphosts",
+ "etc/ftpusers",
+ "winnt/system32/logfiles/smtpsvc",
+ "winnt/system32/logfiles/smtpsvc1",
+ "winnt/system32/logfiles/smtpsvc2",
+ "winnt/system32/logfiles/smtpsvc3",
+ "winnt/system32/logfiles/smtpsvc4",
+ "winnt/system32/logfiles/smtpsvc5",
+ "windows/system32/logfiles/smtpsvc",
+ "windows/system32/logfiles/smtpsvc1",
+ "windows/system32/logfiles/smtpsvc2",
+ "windows/system32/logfiles/smtpsvc3",
+ "windows/system32/logfiles/smtpsvc4",
+ "windows/system32/logfiles/smtpsvc5",
+ "etc/osxhttpd/osxhttpd.conf",
+ "system/library/webobjects/adaptors/apache2.2/apache.conf",
+ "etc/apache2/sites-available/default",
+ "etc/apache2/sites-available/default-ssl",
+ "etc/apache2/sites-enabled/000-default",
+ "etc/apache2/sites-enabled/default",
+ "etc/apache2/apache2.conf",
+ "etc/apache2/ports.conf",
+ "usr/local/etc/apache/httpd.conf",
+ "usr/pkg/etc/httpd/httpd.conf",
+ "usr/pkg/etc/httpd/httpd-default.conf",
+ "usr/pkg/etc/httpd/httpd-vhosts.conf",
+ "etc/httpd/mod_php.conf",
+ "etc/httpd/extra/httpd-ssl.conf",
+ "etc/rc.d/rc.httpd",
+ "usr/local/apache/conf/httpd.conf.default",
+ "usr/local/apache/conf/access.conf",
+ "usr/local/apache22/conf/httpd.conf",
+ "usr/local/apache22/httpd.conf",
+ "usr/local/etc/apache22/conf/httpd.conf",
+ "usr/local/apps/apache22/conf/httpd.conf",
+ "etc/apache22/conf/httpd.conf",
+ "etc/apache22/httpd.conf",
+ "opt/apache22/conf/httpd.conf",
+ "usr/local/etc/apache2/vhosts.conf",
+ "usr/local/apache/conf/vhosts.conf",
+ "usr/local/apache2/conf/vhosts.conf",
+ "usr/local/apache/conf/vhosts-custom.conf",
+ "usr/local/apache2/conf/vhosts-custom.conf",
+ "etc/apache/default-server.conf",
+ "etc/apache2/default-server.conf",
+ "usr/local/apache2/conf/extra/httpd-ssl.conf",
+ "usr/local/apache2/conf/ssl.conf",
+ "etc/httpd/conf.d",
+ "usr/local/etc/apache22/httpd.conf",
+ "usr/local/etc/apache2/httpd.conf",
+ "etc/apache2/httpd2.conf",
+ "etc/apache2/ssl-global.conf",
+ "etc/apache2/vhosts.d/00_default_vhost.conf",
+ "apache/conf/httpd.conf",
+ "etc/apache/httpd.conf",
+ "etc/httpd/conf",
+ "http/httpd.conf",
+ "usr/local/apache1.3/conf/httpd.conf",
+ "usr/local/etc/httpd/conf",
+ "var/apache/conf/httpd.conf",
+ "var/www/conf",
+ "www/apache/conf/httpd.conf",
+ "www/conf/httpd.conf",
+ "etc/init.d",
+ "etc/apache/access.conf",
+ "etc/rc.conf",
+ "www/logs/freebsddiary-error.log",
+ "www/logs/freebsddiary-access_log",
+ "library/webserver/documents/index.html",
+ "library/webserver/documents/index.htm",
+ "library/webserver/documents/default.html",
+ "library/webserver/documents/default.htm",
+ "library/webserver/documents/index.php",
+ "library/webserver/documents/default.php",
+ "usr/local/etc/webmin/miniserv.conf",
+ "etc/webmin/miniserv.conf",
+ "usr/local/etc/webmin/miniserv.users",
+ "etc/webmin/miniserv.users",
+ "winnt/system32/logfiles/w3svc/inetsvn1.log",
+ "winnt/system32/logfiles/w3svc1/inetsvn1.log",
+ "winnt/system32/logfiles/w3svc2/inetsvn1.log",
+ "winnt/system32/logfiles/w3svc3/inetsvn1.log",
+ "windows/system32/logfiles/w3svc/inetsvn1.log",
+ "windows/system32/logfiles/w3svc1/inetsvn1.log",
+ "windows/system32/logfiles/w3svc2/inetsvn1.log",
+ "windows/system32/logfiles/w3svc3/inetsvn1.log",
+ "apache/logs/error.log",
+ "apache/logs/access.log",
+ "apache2/logs/error.log",
+ "apache2/logs/access.log",
+ "logs/error.log",
+ "logs/access.log",
+ "etc/httpd/logs/access_log",
+ "etc/httpd/logs/access.log",
+ "etc/httpd/logs/error_log",
+ "etc/httpd/logs/error.log",
+ "usr/local/apache/logs/access_log",
+ "usr/local/apache/logs/access.log",
+ "usr/local/apache/logs/error_log",
+ "usr/local/apache/logs/error.log",
+ "usr/local/apache2/logs/access_log",
+ "usr/local/apache2/logs/access.log",
+ "usr/local/apache2/logs/error_log",
+ "usr/local/apache2/logs/error.log",
+ "var/www/logs/access_log",
+ "var/www/logs/access.log",
+ "var/www/logs/error_log",
+ "var/www/logs/error.log",
+ "opt/lampp/logs/access_log",
+ "opt/lampp/logs/error_log",
+ "opt/xampp/logs/access_log",
+ "opt/xampp/logs/error_log",
+ "opt/lampp/logs/access.log",
+ "opt/lampp/logs/error.log",
+ "opt/xampp/logs/access.log",
+ "opt/xampp/logs/error.log",
+ "program files/apache group/apache/logs/access.log",
+ "program files/apache group/apache/logs/error.log",
+ "program files/apache software foundation/apache2.2/logs/error.log",
+ "program files/apache software foundation/apache2.2/logs/access.log",
+ "opt/apache/apache.conf",
+ "opt/apache/conf/apache.conf",
+ "opt/apache2/apache.conf",
+ "opt/apache2/conf/apache.conf",
+ "opt/httpd/apache.conf",
+ "opt/httpd/conf/apache.conf",
+ "etc/httpd/apache.conf",
+ "etc/apache2/apache.conf",
+ "etc/httpd/conf/apache.conf",
+ "usr/local/apache/apache.conf",
+ "usr/local/apache/conf/apache.conf",
+ "usr/local/apache2/apache.conf",
+ "usr/local/apache2/conf/apache.conf",
+ "usr/local/php/apache.conf.php",
+ "usr/local/php4/apache.conf.php",
+ "usr/local/php5/apache.conf.php",
+ "usr/local/php/apache.conf",
+ "usr/local/php4/apache.conf",
+ "usr/local/php5/apache.conf",
+ "private/etc/httpd/apache.conf",
+ "opt/apache/apache2.conf",
+ "opt/apache/conf/apache2.conf",
+ "opt/apache2/apache2.conf",
+ "opt/apache2/conf/apache2.conf",
+ "opt/httpd/apache2.conf",
+ "opt/httpd/conf/apache2.conf",
+ "etc/httpd/apache2.conf",
+ "etc/httpd/conf/apache2.conf",
+ "usr/local/apache/apache2.conf",
+ "usr/local/apache/conf/apache2.conf",
+ "usr/local/apache2/apache2.conf",
+ "usr/local/apache2/conf/apache2.conf",
+ "usr/local/php/apache2.conf.php",
+ "usr/local/php4/apache2.conf.php",
+ "usr/local/php5/apache2.conf.php",
+ "usr/local/php/apache2.conf",
+ "usr/local/php4/apache2.conf",
+ "usr/local/php5/apache2.conf",
+ "private/etc/httpd/apache2.conf",
+ "usr/local/apache/conf/httpd.conf",
+ "usr/local/apache2/conf/httpd.conf",
+ "etc/httpd/conf/httpd.conf",
+ "etc/apache/apache.conf",
+ "etc/apache/conf/httpd.conf",
+ "etc/apache2/httpd.conf",
+ "usr/apache2/conf/httpd.conf",
+ "usr/apache/conf/httpd.conf",
+ "usr/local/etc/apache/conf/httpd.conf",
+ "usr/local/apache/httpd.conf",
+ "usr/local/apache2/httpd.conf",
+ "usr/local/httpd/conf/httpd.conf",
+ "usr/local/etc/apache2/conf/httpd.conf",
+ "usr/local/etc/httpd/conf/httpd.conf",
+ "usr/local/apps/apache2/conf/httpd.conf",
+ "usr/local/apps/apache/conf/httpd.conf",
+ "usr/local/php/httpd.conf.php",
+ "usr/local/php4/httpd.conf.php",
+ "usr/local/php5/httpd.conf.php",
+ "usr/local/php/httpd.conf",
+ "usr/local/php4/httpd.conf",
+ "usr/local/php5/httpd.conf",
+ "etc/apache2/conf/httpd.conf",
+ "etc/http/conf/httpd.conf",
+ "etc/httpd/httpd.conf",
+ "etc/http/httpd.conf",
+ "etc/httpd.conf",
+ "opt/apache/conf/httpd.conf",
+ "opt/apache2/conf/httpd.conf",
+ "var/www/conf/httpd.conf",
+ "private/etc/httpd/httpd.conf",
+ "private/etc/httpd/httpd.conf.default",
+ "etc/apache2/vhosts.d/default_vhost.include",
+ "etc/apache2/conf.d/charset",
+ "etc/apache2/conf.d/security",
+ "etc/apache2/envvars",
+ "etc/apache2/mods-available/autoindex.conf",
+ "etc/apache2/mods-available/deflate.conf",
+ "etc/apache2/mods-available/dir.conf",
+ "etc/apache2/mods-available/mem_cache.conf",
+ "etc/apache2/mods-available/mime.conf",
+ "etc/apache2/mods-available/proxy.conf",
+ "etc/apache2/mods-available/setenvif.conf",
+ "etc/apache2/mods-available/ssl.conf",
+ "etc/apache2/mods-enabled/alias.conf",
+ "etc/apache2/mods-enabled/deflate.conf",
+ "etc/apache2/mods-enabled/dir.conf",
+ "etc/apache2/mods-enabled/mime.conf",
+ "etc/apache2/mods-enabled/negotiation.conf",
+ "etc/apache2/mods-enabled/php5.conf",
+ "etc/apache2/mods-enabled/status.conf",
+ "program files/apache group/apache/conf/httpd.conf",
+ "program files/apache group/apache2/conf/httpd.conf",
+ "program files/xampp/apache/conf/apache.conf",
+ "program files/xampp/apache/conf/apache2.conf",
+ "program files/xampp/apache/conf/httpd.conf",
+ "program files/apache group/apache/apache.conf",
+ "program files/apache group/apache/conf/apache.conf",
+ "program files/apache group/apache2/conf/apache.conf",
+ "program files/apache group/apache/apache2.conf",
+ "program files/apache group/apache/conf/apache2.conf",
+ "program files/apache group/apache2/conf/apache2.conf",
+ "program files/apache software foundation/apache2.2/conf/httpd.conf",
+ "volumes/macintosh_hd1/opt/httpd/conf/httpd.conf",
+ "volumes/macintosh_hd1/opt/apache/conf/httpd.conf",
+ "volumes/macintosh_hd1/opt/apache2/conf/httpd.conf",
+ "volumes/macintosh_hd1/usr/local/php/httpd.conf.php",
+ "volumes/macintosh_hd1/usr/local/php4/httpd.conf.php",
+ "volumes/macintosh_hd1/usr/local/php5/httpd.conf.php",
+ "volumes/webbackup/opt/apache2/conf/httpd.conf",
+ "volumes/webbackup/private/etc/httpd/httpd.conf",
+ "volumes/webbackup/private/etc/httpd/httpd.conf.default",
+ "usr/local/etc/apache/vhosts.conf",
+ "usr/local/jakarta/tomcat/conf/jakarta.conf",
+ "usr/local/jakarta/tomcat/conf/server.xml",
+ "usr/local/jakarta/tomcat/conf/context.xml",
+ "usr/local/jakarta/tomcat/conf/workers.properties",
+ "usr/local/jakarta/tomcat/conf/logging.properties",
+ "usr/local/jakarta/dist/tomcat/conf/jakarta.conf",
+ "usr/local/jakarta/dist/tomcat/conf/server.xml",
+ "usr/local/jakarta/dist/tomcat/conf/context.xml",
+ "usr/local/jakarta/dist/tomcat/conf/workers.properties",
+ "usr/local/jakarta/dist/tomcat/conf/logging.properties",
+ "usr/share/tomcat6/conf/server.xml",
+ "usr/share/tomcat6/conf/context.xml",
+ "usr/share/tomcat6/conf/workers.properties",
+ "usr/share/tomcat6/conf/logging.properties",
+ "var/cpanel/tomcat.options",
+ "usr/local/jakarta/tomcat/logs/catalina.out",
+ "usr/local/jakarta/tomcat/logs/catalina.err",
+ "opt/tomcat/logs/catalina.out",
+ "opt/tomcat/logs/catalina.err",
+ "usr/share/logs/catalina.out",
+ "usr/share/logs/catalina.err",
+ "usr/share/tomcat/logs/catalina.out",
+ "usr/share/tomcat/logs/catalina.err",
+ "usr/share/tomcat6/logs/catalina.out",
+ "usr/share/tomcat6/logs/catalina.err",
+ "usr/local/apache/logs/mod_jk.log",
+ "usr/local/jakarta/tomcat/logs/mod_jk.log",
+ "usr/local/jakarta/dist/tomcat/logs/mod_jk.log",
+ "opt/[jboss]/server/default/conf/jboss-minimal.xml",
+ "opt/[jboss]/server/default/conf/jboss-service.xml",
+ "opt/[jboss]/server/default/conf/jndi.properties",
+ "opt/[jboss]/server/default/conf/log4j.xml",
+ "opt/[jboss]/server/default/conf/login-config.xml",
+ "opt/[jboss]/server/default/conf/standardjaws.xml",
+ "opt/[jboss]/server/default/conf/standardjboss.xml",
+ "opt/[jboss]/server/default/conf/server.log.properties",
+ "opt/[jboss]/server/default/deploy/jboss-logging.xml",
+ "usr/local/[jboss]/server/default/conf/jboss-minimal.xml",
+ "usr/local/[jboss]/server/default/conf/jboss-service.xml",
+ "usr/local/[jboss]/server/default/conf/jndi.properties",
+ "usr/local/[jboss]/server/default/conf/log4j.xml",
+ "usr/local/[jboss]/server/default/conf/login-config.xml",
+ "usr/local/[jboss]/server/default/conf/standardjaws.xml",
+ "usr/local/[jboss]/server/default/conf/standardjboss.xml",
+ "usr/local/[jboss]/server/default/conf/server.log.properties",
+ "usr/local/[jboss]/server/default/deploy/jboss-logging.xml",
+ "private/tmp/[jboss]/server/default/conf/jboss-minimal.xml",
+ "private/tmp/[jboss]/server/default/conf/jboss-service.xml",
+ "private/tmp/[jboss]/server/default/conf/jndi.properties",
+ "private/tmp/[jboss]/server/default/conf/log4j.xml",
+ "private/tmp/[jboss]/server/default/conf/login-config.xml",
+ "private/tmp/[jboss]/server/default/conf/standardjaws.xml",
+ "private/tmp/[jboss]/server/default/conf/standardjboss.xml",
+ "private/tmp/[jboss]/server/default/conf/server.log.properties",
+ "private/tmp/[jboss]/server/default/deploy/jboss-logging.xml",
+ "tmp/[jboss]/server/default/conf/jboss-minimal.xml",
+ "tmp/[jboss]/server/default/conf/jboss-service.xml",
+ "tmp/[jboss]/server/default/conf/jndi.properties",
+ "tmp/[jboss]/server/default/conf/log4j.xml",
+ "tmp/[jboss]/server/default/conf/login-config.xml",
+ "tmp/[jboss]/server/default/conf/standardjaws.xml",
+ "tmp/[jboss]/server/default/conf/standardjboss.xml",
+ "tmp/[jboss]/server/default/conf/server.log.properties",
+ "tmp/[jboss]/server/default/deploy/jboss-logging.xml",
+ "program files/[jboss]/server/default/conf/jboss-minimal.xml",
+ "program files/[jboss]/server/default/conf/jboss-service.xml",
+ "program files/[jboss]/server/default/conf/jndi.properties",
+ "program files/[jboss]/server/default/conf/log4j.xml",
+ "program files/[jboss]/server/default/conf/login-config.xml",
+ "program files/[jboss]/server/default/conf/standardjaws.xml",
+ "program files/[jboss]/server/default/conf/standardjboss.xml",
+ "program files/[jboss]/server/default/conf/server.log.properties",
+ "program files/[jboss]/server/default/deploy/jboss-logging.xml",
+ "[jboss]/server/default/conf/jboss-minimal.xml",
+ "[jboss]/server/default/conf/jboss-service.xml",
+ "[jboss]/server/default/conf/jndi.properties",
+ "[jboss]/server/default/conf/log4j.xml",
+ "[jboss]/server/default/conf/login-config.xml",
+ "[jboss]/server/default/conf/standardjaws.xml",
+ "[jboss]/server/default/conf/standardjboss.xml",
+ "[jboss]/server/default/conf/server.log.properties",
+ "[jboss]/server/default/deploy/jboss-logging.xml",
+ "opt/[jboss]/server/default/log/server.log",
+ "opt/[jboss]/server/default/log/boot.log",
+ "usr/local/[jboss]/server/default/log/server.log",
+ "usr/local/[jboss]/server/default/log/boot.log",
+ "private/tmp/[jboss]/server/default/log/server.log",
+ "private/tmp/[jboss]/server/default/log/boot.log",
+ "tmp/[jboss]/server/default/log/server.log",
+ "tmp/[jboss]/server/default/log/boot.log",
+ "program files/[jboss]/server/default/log/server.log",
+ "program files/[jboss]/server/default/log/boot.log",
+ "[jboss]/server/default/log/server.log",
+ "[jboss]/server/default/log/boot.log",
+ "var/lighttpd.log",
+ "var/logs/access.log",
+ "usr/local/apache2/logs/lighttpd.error.log",
+ "usr/local/apache2/logs/lighttpd.log",
+ "usr/local/apache/logs/lighttpd.error.log",
+ "usr/local/apache/logs/lighttpd.log",
+ "usr/local/lighttpd/log/lighttpd.error.log",
+ "usr/local/lighttpd/log/access.log",
+ "usr/home/user/var/log/lighttpd.error.log",
+ "usr/home/user/var/log/apache.log",
+ "home/user/lighttpd/lighttpd.conf",
+ "usr/home/user/lighttpd/lighttpd.conf",
+ "etc/lighttpd/lighthttpd.conf",
+ "usr/local/etc/lighttpd.conf",
+ "usr/local/lighttpd/conf/lighttpd.conf",
+ "usr/local/etc/lighttpd.conf.new",
+ "var/www/.lighttpdpassword",
+ "logs/access_log",
+ "logs/error_log",
+ "etc/nginx/nginx.conf",
+ "usr/local/etc/nginx/nginx.conf",
+ "usr/local/nginx/conf/nginx.conf",
+ "usr/local/zeus/web/global.cfg",
+ "usr/local/zeus/web/log/errors",
+ "opt/lsws/conf/httpd_conf.xml",
+ "usr/local/lsws/conf/httpd_conf.xml",
+ "opt/lsws/logs/error.log",
+ "opt/lsws/logs/access.log",
+ "usr/local/lsws/logs/error.log",
+ "usr/local/logs/access.log",
+ "usr/local/samba/lib/log.user",
+ "usr/local/logs/samba.log",
+ "etc/samba/netlogon",
+ "etc/smbpasswd",
+ "etc/smb.conf",
+ "etc/samba/dhcp.conf",
+ "etc/samba/smb.conf",
+ "etc/samba/samba.conf",
+ "etc/samba/smb.conf.user",
+ "etc/samba/smbpasswd",
+ "etc/samba/smbusers",
+ "etc/samba/private/smbpasswd",
+ "usr/local/etc/smb.conf",
+ "usr/local/samba/lib/smb.conf.user",
+ "etc/dhcp3/dhclient.conf",
+ "etc/dhcp3/dhcpd.conf",
+ "etc/dhcp/dhclient.conf",
+ "program files/vidalia bundle/polipo/polipo.conf",
+ "etc/tor/tor-tsocks.conf",
+ "etc/stunnel/stunnel.conf",
+ "etc/tsocks.conf",
+ "etc/tinyproxy/tinyproxy.conf",
+ "etc/miredo-server.conf",
+ "etc/miredo.conf",
+ "etc/miredo/miredo-server.conf",
+ "etc/miredo/miredo.conf",
+ "etc/wicd/dhclient.conf.template.default",
+ "etc/wicd/manager-settings.conf",
+ "etc/wicd/wired-settings.conf",
+ "etc/wicd/wireless-settings.conf",
+ "etc/ipfw.rules",
+ "etc/ipfw.conf",
+ "etc/firewall.rules",
+ "winnt/system32/logfiles/firewall/pfirewall.log",
+ "winnt/system32/logfiles/firewall/pfirewall.log.old",
+ "windows/system32/logfiles/firewall/pfirewall.log",
+ "windows/system32/logfiles/firewall/pfirewall.log.old",
+ "etc/clamav/clamd.conf",
+ "etc/clamav/freshclam.conf",
+ "etc/x11/xorg.conf",
+ "etc/x11/xorg.conf-vesa",
+ "etc/x11/xorg.conf-vmware",
+ "etc/x11/xorg.conf.beforevmwaretoolsinstall",
+ "etc/x11/xorg.conf.orig",
+ "etc/bluetooth/input.conf",
+ "etc/bluetooth/main.conf",
+ "etc/bluetooth/network.conf",
+ "etc/bluetooth/rfcomm.conf",
+ "etc/bash_completion.d/debconf",
+ "root/.bash_logout",
+ "root/.bash_history",
+ "root/.bash_config",
+ "root/.bashrc",
+ "etc/bash.bashrc",
+ "var/adm/syslog",
+ "var/adm/sulog",
+ "var/adm/utmp",
+ "var/adm/utmpx",
+ "var/adm/wtmp",
+ "var/adm/wtmpx",
+ "var/adm/lastlog/username",
+ "usr/spool/lp/log",
+ "var/adm/lp/lpd-errs",
+ "usr/lib/cron/log",
+ "var/adm/loginlog",
+ "var/adm/pacct",
+ "var/adm/dtmp",
+ "var/adm/acct/sum/loginlog",
+ "var/adm/x0msgs",
+ "var/adm/crash/vmcore",
+ "var/adm/crash/unix",
+ "etc/newsyslog.conf",
+ "var/adm/qacct",
+ "var/adm/ras/errlog",
+ "var/adm/ras/bootlog",
+ "var/adm/cron/log",
+ "etc/utmp",
+ "etc/security/lastlog",
+ "etc/security/failedlogin",
+ "usr/spool/mqueue/syslog",
+ "var/adm/messages",
+ "var/adm/aculogs",
+ "var/adm/aculog",
+ "var/adm/vold.log",
+ "var/adm/log/asppp.log",
+ "var/lp/logs/lpsched",
+ "var/lp/logs/lpnet",
+ "var/lp/logs/requests",
+ "var/cron/log",
+ "var/saf/_log",
+ "var/saf/port/log",
+ "tmp/access.log",
+ "etc/sensors.conf",
+ "etc/sensors3.conf",
+ "etc/host.conf",
+ "etc/pam.conf",
+ "etc/resolv.conf",
+ "etc/apt/apt.conf",
+ "etc/inetd.conf",
+ "etc/syslog.conf",
+ "etc/sysctl.conf",
+ "etc/sysctl.d/10-console-messages.conf",
+ "etc/sysctl.d/10-network-security.conf",
+ "etc/sysctl.d/10-process-security.conf",
+ "etc/sysctl.d/wine.sysctl.conf",
+ "etc/security/access.conf",
+ "etc/security/group.conf",
+ "etc/security/limits.conf",
+ "etc/security/namespace.conf",
+ "etc/security/pam_env.conf",
+ "etc/security/sepermit.conf",
+ "etc/security/time.conf",
+ "etc/ssh/sshd_config",
+ "etc/adduser.conf",
+ "etc/deluser.conf",
+ "etc/avahi/avahi-daemon.conf",
+ "etc/ca-certificates.conf",
+ "etc/ca-certificates.conf.dpkg-old",
+ "etc/casper.conf",
+ "etc/chkrootkit.conf",
+ "etc/debconf.conf",
+ "etc/dns2tcpd.conf",
+ "etc/e2fsck.conf",
+ "etc/esound/esd.conf",
+ "etc/etter.conf",
+ "etc/fuse.conf",
+ "etc/foremost.conf",
+ "etc/hdparm.conf",
+ "etc/kernel-img.conf",
+ "etc/kernel-pkg.conf",
+ "etc/ld.so.conf",
+ "etc/ltrace.conf",
+ "etc/mail/sendmail.conf",
+ "etc/manpath.config",
+ "etc/kbd/config",
+ "etc/ldap/ldap.conf",
+ "etc/logrotate.conf",
+ "etc/mtools.conf",
+ "etc/smi.conf",
+ "etc/updatedb.conf",
+ "etc/pulse/client.conf",
+ "usr/share/adduser/adduser.conf",
+ "etc/hostname",
+ "etc/networks",
+ "etc/timezone",
+ "etc/modules",
+ "etc/passwd",
+ "etc/shadow",
+ "etc/fstab",
+ "etc/motd",
+ "etc/hosts",
+ "etc/group",
+ "etc/alias",
+ "etc/crontab",
+ "etc/crypttab",
+ "etc/exports",
+ "etc/mtab",
+ "etc/hosts.allow",
+ "etc/hosts.deny",
+ "etc/os-release",
+ "etc/password.master",
+ "etc/profile",
+ "etc/default/grub",
+ "etc/resolvconf/update-libc.d/sendmail",
+ "etc/inittab",
+ "etc/issue",
+ "etc/issue.net",
+ "etc/login.defs",
+ "etc/sudoers",
+ "etc/sysconfig/network-scripts/ifcfg-eth0",
+ "etc/redhat-release",
+ "etc/scw-release",
+ "etc/system-release-cpe",
+ "etc/debian_version",
+ "etc/fedora-release",
+ "etc/mandrake-release",
+ "etc/slackware-release",
+ "etc/suse-release",
+ "etc/security/group",
+ "etc/security/passwd",
+ "etc/security/user",
+ "etc/security/environ",
+ "etc/security/limits",
+ "etc/security/opasswd",
+ "boot/grub/grub.cfg",
+ "boot/grub/menu.lst",
+ "root/.ksh_history",
+ "root/.xauthority",
+ "usr/lib/security/mkuser.default",
+ "var/lib/squirrelmail/prefs/squirrelmail.log",
+ "etc/squirrelmail/apache.conf",
+ "etc/squirrelmail/config_local.php",
+ "etc/squirrelmail/default_pref",
+ "etc/squirrelmail/index.php",
+ "etc/squirrelmail/config_default.php",
+ "etc/squirrelmail/config.php",
+ "etc/squirrelmail/filters_setup.php",
+ "etc/squirrelmail/sqspell_config.php",
+ "etc/squirrelmail/config/config.php",
+ "etc/httpd/conf.d/squirrelmail.conf",
+ "usr/share/squirrelmail/config/config.php",
+ "private/etc/squirrelmail/config/config.php",
+ "srv/www/htdos/squirrelmail/config/config.php",
+ "var/www/squirrelmail/config/config.php",
+ "var/www/html/squirrelmail/config/config.php",
+ "var/www/html/squirrelmail-1.2.9/config/config.php",
+ "usr/share/squirrelmail/plugins/squirrel_logger/setup.php",
+ "usr/local/squirrelmail/www/readme",
+ "windows/system32/drivers/etc/hosts",
+ "windows/system32/drivers/etc/lmhosts.sam",
+ "windows/system32/drivers/etc/networks",
+ "windows/system32/drivers/etc/protocol",
+ "windows/system32/drivers/etc/services",
+ "/boot.ini",
+ "windows/debug/netsetup.log",
+ "windows/comsetup.log",
+ "windows/repair/setup.log",
+ "windows/setupact.log",
+ "windows/setupapi.log",
+ "windows/setuperr.log",
+ "windows/updspapi.log",
+ "windows/wmsetup.log",
+ "windows/windowsupdate.log",
+ "windows/odbc.ini",
+ "usr/local/psa/admin/htdocs/domains/databases/phpmyadmin/libraries/config.default.php",
+ "etc/apache2/conf.d/phpmyadmin.conf",
+ "etc/phpmyadmin/config.inc.php",
+ "etc/openldap/ldap.conf",
+ "etc/cups/acroread.conf",
+ "etc/cups/cupsd.conf",
+ "etc/cups/cupsd.conf.default",
+ "etc/cups/pdftops.conf",
+ "etc/cups/printers.conf",
+ "windows/system32/macromed/flash/flashinstall.log",
+ "windows/system32/macromed/flash/install.log",
+ "etc/cvs-cron.conf",
+ "etc/cvs-pserver.conf",
+ "etc/subversion/config",
+ "etc/modprobe.d/vmware-tools.conf",
+ "etc/updatedb.conf.beforevmwaretoolsinstall",
+ "etc/vmware-tools/config",
+ "etc/vmware-tools/tpvmlp.conf",
+ "etc/vmware-tools/vmware-tools-libraries.conf",
+ "var/log",
+ "var/log/sw-cp-server/error_log",
+ "var/log/sso/sso.log",
+ "var/log/dpkg.log",
+ "var/log/btmp",
+ "var/log/utmp",
+ "var/log/wtmp",
+ "var/log/mysql/mysql-bin.log",
+ "var/log/mysql/mysql-bin.index",
+ "var/log/mysql/data/mysql-bin.index",
+ "var/log/mysql.log",
+ "var/log/mysql.err",
+ "var/log/mysqlderror.log",
+ "var/log/mysql/mysql.log",
+ "var/log/mysql/mysql-slow.log",
+ "var/log/mysql-bin.index",
+ "var/log/data/mysql-bin.index",
+ "var/log/postgresql/postgresql.log",
+ "var/log/postgres/pg_backup.log",
+ "var/log/postgres/postgres.log",
+ "var/log/postgresql.log",
+ "var/log/pgsql/pgsql.log",
+ "var/log/postgresql/postgresql-8.1-main.log",
+ "var/log/postgresql/postgresql-8.3-main.log",
+ "var/log/postgresql/postgresql-8.4-main.log",
+ "var/log/postgresql/postgresql-9.0-main.log",
+ "var/log/postgresql/postgresql-9.1-main.log",
+ "var/log/pgsql8.log",
+ "var/log/postgresql/postgres.log",
+ "var/log/pgsql_log",
+ "var/log/postgresql/main.log",
+ "var/log/cron",
+ "var/log/postgres.log",
+ "var/log/proftpd",
+ "var/log/proftpd/xferlog.legacy",
+ "var/log/proftpd.access_log",
+ "var/log/proftpd.xferlog",
+ "var/log/vsftpd.log",
+ "var/log/xferlog",
+ "var/log/pure-ftpd/pure-ftpd.log",
+ "var/log/pureftpd.log",
+ "var/log/muddleftpd",
+ "var/log/muddleftpd.conf",
+ "var/log/ftp-proxy/ftp-proxy.log",
+ "var/log/ftp-proxy",
+ "var/log/ftplog",
+ "var/log/exim_mainlog",
+ "var/log/exim/mainlog",
+ "var/log/maillog",
+ "var/log/exim_paniclog",
+ "var/log/exim/paniclog",
+ "var/log/exim/rejectlog",
+ "var/log/exim_rejectlog",
+ "var/log/webmin/miniserv.log",
+ "var/log/httpd/access_log",
+ "var/log/httpd/error_log",
+ "var/log/httpd/access.log",
+ "var/log/httpd/error.log",
+ "var/log/apache/access_log",
+ "var/log/apache/access.log",
+ "var/log/apache/error_log",
+ "var/log/apache/error.log",
+ "var/log/apache2/access_log",
+ "var/log/apache2/access.log",
+ "var/log/apache2/error_log",
+ "var/log/apache2/error.log",
+ "var/log/access_log",
+ "var/log/access.log",
+ "var/log/error_log",
+ "var/log/error.log",
+ "var/log/tomcat6/catalina.out",
+ "var/log/lighttpd.error.log",
+ "var/log/lighttpd.access.log",
+ "var/logs/access.log",
+ "var/log/lighttpd/",
+ "var/log/lighttpd/error.log",
+ "var/log/lighttpd/access.www.log",
+ "var/log/lighttpd/error.www.log",
+ "var/log/lighttpd/access.log",
+ "var/log/lighttpd/{domain}/access.log",
+ "var/log/lighttpd/{domain}/error.log",
+ "var/log/nginx/access_log",
+ "var/log/nginx/error_log",
+ "var/log/nginx/access.log",
+ "var/log/nginx/error.log",
+ "var/log/nginx.access_log",
+ "var/log/nginx.error_log",
+ "var/log/samba/log.smbd",
+ "var/log/samba/log.nmbd",
+ "var/log/samba.log",
+ "var/log/samba.log1",
+ "var/log/samba.log2",
+ "var/log/log.smb",
+ "var/log/ipfw.log",
+ "var/log/ipfw",
+ "var/log/ipfw/ipfw.log",
+ "var/log/ipfw.today",
+ "var/log/poplog",
+ "var/log/authlog",
+ "var/log/news.all",
+ "var/log/news/news.all",
+ "var/log/news/news.crit",
+ "var/log/news/news.err",
+ "var/log/news/news.notice",
+ "var/log/news/suck.err",
+ "var/log/news/suck.notice",
+ "var/log/messages",
+ "var/log/messages.1",
+ "var/log/user.log",
+ "var/log/user.log.1",
+ "var/log/auth.log",
+ "var/log/pm-powersave.log",
+ "var/log/xorg.0.log",
+ "var/log/daemon.log",
+ "var/log/daemon.log.1",
+ "var/log/kern.log",
+ "var/log/kern.log.1",
+ "var/log/mail.err",
+ "var/log/mail.info",
+ "var/log/mail.warn",
+ "var/log/ufw.log",
+ "var/log/boot.log",
+ "var/log/syslog",
+ "var/log/syslog.1",
+ "var/log/squirrelmail.log",
+ "var/log/apache2/squirrelmail.log",
+ "var/log/apache2/squirrelmail.err.log",
+ "var/log/mail.log",
+ "var/log/vmware/hostd.log",
+ "var/log/vmware/hostd-1.log",
+ "/wp-config.php",
+ "/wp-config.bak",
+ "/wp-config.old",
+ "/wp-config.temp",
+ "/wp-config.tmp",
+ "/wp-config.txt",
+ "/config.yml",
+ "/config_dev.yml",
+ "/config_prod.yml",
+ "/config_test.yml",
+ "/parameters.yml",
+ "/routing.yml",
+ "/security.yml",
+ "/services.yml",
+ "sites/default/default.settings.php",
+ "sites/default/settings.php",
+ "sites/default/settings.local.php",
+ "app/etc/local.xml",
+ "/sftp-config.json",
+ "/web.config",
+ "includes/config.php",
+ "includes/configure.php",
+ "/config.inc.php",
+ "/localsettings.php",
+ "inc/config.php",
+ "typo3conf/localconf.php",
+ "config/app.php",
+ "config/custom.php",
+ "config/database.php",
+ "/configuration.php",
+ "/config.php",
+ "var/mail/www-data",
+ "etc/network/",
+ "etc/init/",
+ "inetpub/wwwroot/global.asa",
+ "system32/inetsrv/config/applicationhost.config",
+ "system32/inetsrv/config/administration.config",
+ "system32/inetsrv/config/redirection.config",
+ "system32/config/default",
+ "system32/config/sam",
+ "system32/config/system",
+ "system32/config/software",
+ "winnt/repair/sam._",
+ "/package.json",
+ "/package-lock.json",
+ "/gruntfile.js",
+ "/npm-debug.log",
+ "/ormconfig.json",
+ "/tsconfig.json",
+ "/webpack.config.js",
+ "/yarn.lock",
+ "proc/0",
+ "proc/1",
+ "proc/2",
+ "proc/3",
+ "proc/4",
+ "proc/5",
+ "proc/6",
+ "proc/7",
+ "proc/8",
+ "proc/9",
+ "proc/acpi",
+ "proc/asound",
+ "proc/bootconfig",
+ "proc/buddyinfo",
+ "proc/bus",
+ "proc/cgroups",
+ "proc/cmdline",
+ "proc/config.gz",
+ "proc/consoles",
+ "proc/cpuinfo",
+ "proc/crypto",
+ "proc/devices",
+ "proc/diskstats",
+ "proc/dma",
+ "proc/docker",
+ "proc/driver",
+ "proc/dynamic_debug",
+ "proc/execdomains",
+ "proc/fb",
+ "proc/filesystems",
+ "proc/fs",
+ "proc/interrupts",
+ "proc/iomem",
+ "proc/ioports",
+ "proc/ipmi",
+ "proc/irq",
+ "proc/kallsyms",
+ "proc/kcore",
+ "proc/keys",
+ "proc/keys",
+ "proc/key-users",
+ "proc/kmsg",
+ "proc/kpagecgroup",
+ "proc/kpagecount",
+ "proc/kpageflags",
+ "proc/latency_stats",
+ "proc/loadavg",
+ "proc/locks",
+ "proc/mdstat",
+ "proc/meminfo",
+ "proc/misc",
+ "proc/modules",
+ "proc/mounts",
+ "proc/mpt",
+ "proc/mtd",
+ "proc/mtrr",
+ "proc/net",
+ "proc/net/tcp",
+ "proc/net/udp",
+ "proc/pagetypeinfo",
+ "proc/partitions",
+ "proc/pressure",
+ "proc/sched_debug",
+ "proc/schedstat",
+ "proc/scsi",
+ "proc/self",
+ "proc/self/cmdline",
+ "proc/self/environ",
+ "proc/self/fd/0",
+ "proc/self/fd/1",
+ "proc/self/fd/10",
+ "proc/self/fd/11",
+ "proc/self/fd/12",
+ "proc/self/fd/13",
+ "proc/self/fd/14",
+ "proc/self/fd/15",
+ "proc/self/fd/2",
+ "proc/self/fd/3",
+ "proc/self/fd/4",
+ "proc/self/fd/5",
+ "proc/self/fd/6",
+ "proc/self/fd/7",
+ "proc/self/fd/8",
+ "proc/self/fd/9",
+ "proc/self/mounts",
+ "proc/self/stat",
+ "proc/self/status",
+ "proc/slabinfo",
+ "proc/softirqs",
+ "proc/stat",
+ "proc/swaps",
+ "proc/sys",
+ "proc/sysrq-trigger",
+ "proc/sysvipc",
+ "proc/thread-self",
+ "proc/timer_list",
+ "proc/timer_stats",
+ "proc/tty",
+ "proc/uptime",
+ "proc/version",
+ "proc/version_signature",
+ "proc/vmallocinfo",
+ "proc/vmstat",
+ "proc/zoneinfo",
+ "sys/block",
+ "sys/bus",
+ "sys/class",
+ "sys/dev",
+ "sys/devices",
+ "sys/firmware",
+ "sys/fs",
+ "sys/hypervisor",
+ "sys/kernel",
+ "sys/module",
+ "sys/power",
+ "windows\\win.ini",
+ "default\\ntuser.dat",
+ "/var/run/secrets/kubernetes.io/serviceaccount"
+ ]
+ },
+ "operator": "phrase_match"
+ }
+ ],
+ "transformers": [
+ "lowercase",
+ "normalizePath"
+ ]
+ },
+ {
+ "id": "crs-931-110",
+ "name": "RFI: Common RFI Vulnerable Parameter Name used w/ URL Payload",
+ "tags": {
+ "type": "rfi",
+ "crs_id": "931110",
+ "category": "attack_attempt",
+ "cwe": "98",
+ "capec": "1000/152/175/253/193",
+ "confidence": "1"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.query"
+ }
+ ],
+ "regex": "(?:\\binclude\\s*\\([^)]*|mosConfig_absolute_path|_CONF\\[path\\]|_SERVER\\[DOCUMENT_ROOT\\]|GALLERY_BASEDIR|path\\[docroot\\]|appserv_root|config\\[root_dir\\])=(?:file|ftps?|https?)://",
+ "options": {
+ "min_length": 15
+ }
+ },
+ "operator": "match_regex"
+ }
+ ],
+ "transformers": []
+ },
+ {
+ "id": "crs-931-120",
+ "name": "RFI: URL Payload Used w/Trailing Question Mark Character (?)",
+ "tags": {
+ "type": "rfi",
+ "crs_id": "931120",
+ "category": "attack_attempt",
+ "cwe": "98",
+ "capec": "1000/152/175/253/193"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.body"
+ },
+ {
+ "address": "server.request.path_params"
+ },
+ {
+ "address": "grpc.server.request.message"
+ },
+ {
+ "address": "graphql.server.all_resolvers"
+ },
+ {
+ "address": "graphql.server.resolver"
+ }
+ ],
+ "regex": "^(?i:file|ftps?)://.*?\\?+$",
+ "options": {
+ "case_sensitive": true,
+ "min_length": 4
+ }
+ },
+ "operator": "match_regex"
+ }
+ ],
+ "transformers": []
+ },
+ {
+ "id": "crs-932-160",
+ "name": "Remote Command Execution: Unix Shell Code Found",
+ "tags": {
+ "type": "command_injection",
+ "crs_id": "932160",
+ "category": "attack_attempt",
+ "cwe": "77",
+ "capec": "1000/152/248/88",
+ "confidence": "1"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.body"
+ },
+ {
+ "address": "server.request.path_params"
+ },
+ {
+ "address": "grpc.server.request.message"
+ },
+ {
+ "address": "graphql.server.all_resolvers"
+ },
+ {
+ "address": "graphql.server.resolver"
+ }
+ ],
+ "list": [
+ "${cdpath}",
+ "${dirstack}",
+ "${home}",
+ "${hostname}",
+ "${ifs}",
+ "${oldpwd}",
+ "${ostype}",
+ "${path}",
+ "${pwd}",
+ "$cdpath",
+ "$dirstack",
+ "$home",
+ "$hostname",
+ "$ifs",
+ "$oldpwd",
+ "$ostype",
+ "$path",
+ "$pwd",
+ "dev/fd/",
+ "dev/null",
+ "dev/stderr",
+ "dev/stdin",
+ "dev/stdout",
+ "dev/tcp/",
+ "dev/udp/",
+ "dev/zero",
+ "etc/master.passwd",
+ "etc/pwd.db",
+ "etc/shells",
+ "etc/spwd.db",
+ "proc/self/",
+ "bin/7z",
+ "bin/7za",
+ "bin/7zr",
+ "bin/ab",
+ "bin/agetty",
+ "bin/ansible-playbook",
+ "bin/apt",
+ "bin/apt-get",
+ "bin/ar",
+ "bin/aria2c",
+ "bin/arj",
+ "bin/arp",
+ "bin/as",
+ "bin/ascii-xfr",
+ "bin/ascii85",
+ "bin/ash",
+ "bin/aspell",
+ "bin/at",
+ "bin/atobm",
+ "bin/awk",
+ "bin/base32",
+ "bin/base64",
+ "bin/basenc",
+ "bin/bash",
+ "bin/bpftrace",
+ "bin/bridge",
+ "bin/bundler",
+ "bin/bunzip2",
+ "bin/busctl",
+ "bin/busybox",
+ "bin/byebug",
+ "bin/bzcat",
+ "bin/bzcmp",
+ "bin/bzdiff",
+ "bin/bzegrep",
+ "bin/bzexe",
+ "bin/bzfgrep",
+ "bin/bzgrep",
+ "bin/bzip2",
+ "bin/bzip2recover",
+ "bin/bzless",
+ "bin/bzmore",
+ "bin/bzz",
+ "bin/c89",
+ "bin/c99",
+ "bin/cancel",
+ "bin/capsh",
+ "bin/cat",
+ "bin/cc",
+ "bin/certbot",
+ "bin/check_by_ssh",
+ "bin/check_cups",
+ "bin/check_log",
+ "bin/check_memory",
+ "bin/check_raid",
+ "bin/check_ssl_cert",
+ "bin/check_statusfile",
+ "bin/chmod",
+ "bin/choom",
+ "bin/chown",
+ "bin/chroot",
+ "bin/clang",
+ "bin/clang++",
+ "bin/cmp",
+ "bin/cobc",
+ "bin/column",
+ "bin/comm",
+ "bin/composer",
+ "bin/core_perl/zipdetails",
+ "bin/cowsay",
+ "bin/cowthink",
+ "bin/cp",
+ "bin/cpan",
+ "bin/cpio",
+ "bin/cpulimit",
+ "bin/crash",
+ "bin/crontab",
+ "bin/csh",
+ "bin/csplit",
+ "bin/csvtool",
+ "bin/cupsfilter",
+ "bin/curl",
+ "bin/cut",
+ "bin/dash",
+ "bin/date",
+ "bin/dd",
+ "bin/dev/fd/",
+ "bin/dev/null",
+ "bin/dev/stderr",
+ "bin/dev/stdin",
+ "bin/dev/stdout",
+ "bin/dev/tcp/",
+ "bin/dev/udp/",
+ "bin/dev/zero",
+ "bin/dialog",
+ "bin/diff",
+ "bin/dig",
+ "bin/dmesg",
+ "bin/dmidecode",
+ "bin/dmsetup",
+ "bin/dnf",
+ "bin/docker",
+ "bin/dosbox",
+ "bin/dpkg",
+ "bin/du",
+ "bin/dvips",
+ "bin/easy_install",
+ "bin/eb",
+ "bin/echo",
+ "bin/ed",
+ "bin/efax",
+ "bin/emacs",
+ "bin/env",
+ "bin/eqn",
+ "bin/es",
+ "bin/esh",
+ "bin/etc/group",
+ "bin/etc/master.passwd",
+ "bin/etc/passwd",
+ "bin/etc/pwd.db",
+ "bin/etc/shadow",
+ "bin/etc/shells",
+ "bin/etc/spwd.db",
+ "bin/ex",
+ "bin/exiftool",
+ "bin/expand",
+ "bin/expect",
+ "bin/expr",
+ "bin/facter",
+ "bin/fetch",
+ "bin/file",
+ "bin/find",
+ "bin/finger",
+ "bin/fish",
+ "bin/flock",
+ "bin/fmt",
+ "bin/fold",
+ "bin/fping",
+ "bin/ftp",
+ "bin/gawk",
+ "bin/gcc",
+ "bin/gcore",
+ "bin/gdb",
+ "bin/gem",
+ "bin/genie",
+ "bin/genisoimage",
+ "bin/ghc",
+ "bin/ghci",
+ "bin/gimp",
+ "bin/ginsh",
+ "bin/git",
+ "bin/grc",
+ "bin/grep",
+ "bin/gtester",
+ "bin/gunzip",
+ "bin/gzexe",
+ "bin/gzip",
+ "bin/hd",
+ "bin/head",
+ "bin/hexdump",
+ "bin/highlight",
+ "bin/hping3",
+ "bin/iconv",
+ "bin/id",
+ "bin/iftop",
+ "bin/install",
+ "bin/ionice",
+ "bin/ip",
+ "bin/irb",
+ "bin/ispell",
+ "bin/jjs",
+ "bin/join",
+ "bin/journalctl",
+ "bin/jq",
+ "bin/jrunscript",
+ "bin/knife",
+ "bin/ksh",
+ "bin/ksshell",
+ "bin/latex",
+ "bin/ld",
+ "bin/ldconfig",
+ "bin/less",
+ "bin/lftp",
+ "bin/ln",
+ "bin/loginctl",
+ "bin/logsave",
+ "bin/look",
+ "bin/lp",
+ "bin/ls",
+ "bin/ltrace",
+ "bin/lua",
+ "bin/lualatex",
+ "bin/luatex",
+ "bin/lwp-download",
+ "bin/lwp-request",
+ "bin/lz",
+ "bin/lz4",
+ "bin/lz4c",
+ "bin/lz4cat",
+ "bin/lzcat",
+ "bin/lzcmp",
+ "bin/lzdiff",
+ "bin/lzegrep",
+ "bin/lzfgrep",
+ "bin/lzgrep",
+ "bin/lzless",
+ "bin/lzma",
+ "bin/lzmadec",
+ "bin/lzmainfo",
+ "bin/lzmore",
+ "bin/mail",
+ "bin/make",
+ "bin/man",
+ "bin/mawk",
+ "bin/mkfifo",
+ "bin/mknod",
+ "bin/more",
+ "bin/mosquitto",
+ "bin/mount",
+ "bin/msgattrib",
+ "bin/msgcat",
+ "bin/msgconv",
+ "bin/msgfilter",
+ "bin/msgmerge",
+ "bin/msguniq",
+ "bin/mtr",
+ "bin/mv",
+ "bin/mysql",
+ "bin/nano",
+ "bin/nasm",
+ "bin/nawk",
+ "bin/nc",
+ "bin/ncat",
+ "bin/neofetch",
+ "bin/nice",
+ "bin/nl",
+ "bin/nm",
+ "bin/nmap",
+ "bin/node",
+ "bin/nohup",
+ "bin/npm",
+ "bin/nroff",
+ "bin/nsenter",
+ "bin/octave",
+ "bin/od",
+ "bin/openssl",
+ "bin/openvpn",
+ "bin/openvt",
+ "bin/opkg",
+ "bin/paste",
+ "bin/pax",
+ "bin/pdb",
+ "bin/pdflatex",
+ "bin/pdftex",
+ "bin/pdksh",
+ "bin/perf",
+ "bin/perl",
+ "bin/pg",
+ "bin/php",
+ "bin/php-cgi",
+ "bin/php5",
+ "bin/php7",
+ "bin/pic",
+ "bin/pico",
+ "bin/pidstat",
+ "bin/pigz",
+ "bin/pip",
+ "bin/pkexec",
+ "bin/pkg",
+ "bin/pr",
+ "bin/printf",
+ "bin/proc/self/",
+ "bin/pry",
+ "bin/ps",
+ "bin/psed",
+ "bin/psftp",
+ "bin/psql",
+ "bin/ptx",
+ "bin/puppet",
+ "bin/pxz",
+ "bin/python",
+ "bin/python2",
+ "bin/python3",
+ "bin/rake",
+ "bin/rbash",
+ "bin/rc",
+ "bin/readelf",
+ "bin/red",
+ "bin/redcarpet",
+ "bin/restic",
+ "bin/rev",
+ "bin/rlogin",
+ "bin/rlwrap",
+ "bin/rpm",
+ "bin/rpmquery",
+ "bin/rsync",
+ "bin/ruby",
+ "bin/run-mailcap",
+ "bin/run-parts",
+ "bin/rview",
+ "bin/rvim",
+ "bin/sash",
+ "bin/sbin/capsh",
+ "bin/sbin/logsave",
+ "bin/sbin/service",
+ "bin/sbin/start-stop-daemon",
+ "bin/scp",
+ "bin/screen",
+ "bin/script",
+ "bin/sed",
+ "bin/service",
+ "bin/setarch",
+ "bin/sftp",
+ "bin/sg",
+ "bin/sh",
+ "bin/shuf",
+ "bin/sleep",
+ "bin/slsh",
+ "bin/smbclient",
+ "bin/snap",
+ "bin/socat",
+ "bin/soelim",
+ "bin/sort",
+ "bin/split",
+ "bin/sqlite3",
+ "bin/ss",
+ "bin/ssh",
+ "bin/ssh-keygen",
+ "bin/ssh-keyscan",
+ "bin/sshpass",
+ "bin/start-stop-daemon",
+ "bin/stdbuf",
+ "bin/strace",
+ "bin/strings",
+ "bin/su",
+ "bin/sysctl",
+ "bin/systemctl",
+ "bin/systemd-resolve",
+ "bin/tac",
+ "bin/tail",
+ "bin/tar",
+ "bin/task",
+ "bin/taskset",
+ "bin/tbl",
+ "bin/tclsh",
+ "bin/tcpdump",
+ "bin/tcsh",
+ "bin/tee",
+ "bin/telnet",
+ "bin/tex",
+ "bin/tftp",
+ "bin/tic",
+ "bin/time",
+ "bin/timedatectl",
+ "bin/timeout",
+ "bin/tmux",
+ "bin/top",
+ "bin/troff",
+ "bin/tshark",
+ "bin/ul",
+ "bin/uname",
+ "bin/uncompress",
+ "bin/unexpand",
+ "bin/uniq",
+ "bin/unlz4",
+ "bin/unlzma",
+ "bin/unpigz",
+ "bin/unrar",
+ "bin/unshare",
+ "bin/unxz",
+ "bin/unzip",
+ "bin/unzstd",
+ "bin/update-alternatives",
+ "bin/uudecode",
+ "bin/uuencode",
+ "bin/valgrind",
+ "bin/vi",
+ "bin/view",
+ "bin/vigr",
+ "bin/vim",
+ "bin/vimdiff",
+ "bin/vipw",
+ "bin/virsh",
+ "bin/volatility",
+ "bin/wall",
+ "bin/watch",
+ "bin/wc",
+ "bin/wget",
+ "bin/whiptail",
+ "bin/who",
+ "bin/whoami",
+ "bin/whois",
+ "bin/wireshark",
+ "bin/wish",
+ "bin/xargs",
+ "bin/xelatex",
+ "bin/xetex",
+ "bin/xmodmap",
+ "bin/xmore",
+ "bin/xpad",
+ "bin/xxd",
+ "bin/xz",
+ "bin/xzcat",
+ "bin/xzcmp",
+ "bin/xzdec",
+ "bin/xzdiff",
+ "bin/xzegrep",
+ "bin/xzfgrep",
+ "bin/xzgrep",
+ "bin/xzless",
+ "bin/xzmore",
+ "bin/yarn",
+ "bin/yelp",
+ "bin/yes",
+ "bin/yum",
+ "bin/zathura",
+ "bin/zip",
+ "bin/zipcloak",
+ "bin/zipcmp",
+ "bin/zipdetails",
+ "bin/zipgrep",
+ "bin/zipinfo",
+ "bin/zipmerge",
+ "bin/zipnote",
+ "bin/zipsplit",
+ "bin/ziptool",
+ "bin/zsh",
+ "bin/zsoelim",
+ "bin/zstd",
+ "bin/zstdcat",
+ "bin/zstdgrep",
+ "bin/zstdless",
+ "bin/zstdmt",
+ "bin/zypper"
+ ]
+ },
+ "operator": "phrase_match"
+ }
+ ],
+ "transformers": [
+ "lowercase",
+ "cmdLine"
+ ]
+ },
+ {
+ "id": "crs-932-171",
+ "name": "Remote Command Execution: Shellshock (CVE-2014-6271)",
+ "tags": {
+ "type": "command_injection",
+ "crs_id": "932171",
+ "category": "attack_attempt",
+ "cwe": "77",
+ "capec": "1000/152/248/88",
+ "confidence": "1"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.body"
+ },
+ {
+ "address": "server.request.path_params"
+ },
+ {
+ "address": "server.request.headers.no_cookies"
+ },
+ {
+ "address": "grpc.server.request.message"
+ },
+ {
+ "address": "graphql.server.all_resolvers"
+ },
+ {
+ "address": "graphql.server.resolver"
+ }
+ ],
+ "regex": "^\\(\\s*\\)\\s+{",
+ "options": {
+ "case_sensitive": true,
+ "min_length": 4
+ }
+ },
+ "operator": "match_regex"
+ }
+ ],
+ "transformers": []
+ },
+ {
+ "id": "crs-932-180",
+ "name": "Restricted File Upload Attempt",
+ "tags": {
+ "type": "command_injection",
+ "crs_id": "932180",
+ "category": "attack_attempt",
+ "cwe": "706",
+ "capec": "1000/225/122/17/177",
+ "confidence": "1"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.headers.no_cookies",
+ "key_path": [
+ "x-filename"
+ ]
+ },
+ {
+ "address": "server.request.headers.no_cookies",
+ "key_path": [
+ "x_filename"
+ ]
+ },
+ {
+ "address": "server.request.headers.no_cookies",
+ "key_path": [
+ "x-file-name"
+ ]
+ }
+ ],
+ "list": [
+ ".htaccess",
+ ".htdigest",
+ ".htpasswd",
+ "wp-config.php",
+ "config.yml",
+ "config_dev.yml",
+ "config_prod.yml",
+ "config_test.yml",
+ "parameters.yml",
+ "routing.yml",
+ "security.yml",
+ "services.yml",
+ "default.settings.php",
+ "settings.php",
+ "settings.local.php",
+ "local.xml",
+ ".env"
+ ]
+ },
+ "operator": "phrase_match"
+ }
+ ],
+ "transformers": [
+ "lowercase"
+ ]
+ },
+ {
+ "id": "crs-933-111",
+ "name": "PHP Injection Attack: PHP Script File Upload Found",
+ "tags": {
+ "type": "unrestricted_file_upload",
+ "crs_id": "933111",
+ "category": "attack_attempt",
+ "cwe": "434",
+ "capec": "1000/225/122/17/650",
+ "confidence": "1"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.headers.no_cookies",
+ "key_path": [
+ "x-filename"
+ ]
+ },
+ {
+ "address": "server.request.headers.no_cookies",
+ "key_path": [
+ "x_filename"
+ ]
+ },
+ {
+ "address": "server.request.headers.no_cookies",
+ "key_path": [
+ "x.filename"
+ ]
+ },
+ {
+ "address": "server.request.headers.no_cookies",
+ "key_path": [
+ "x-file-name"
+ ]
+ }
+ ],
+ "regex": ".*\\.(?:php\\d*|phtml)\\..*$",
+ "options": {
+ "case_sensitive": true,
+ "min_length": 5
+ }
+ },
+ "operator": "match_regex"
+ }
+ ],
+ "transformers": [
+ "lowercase"
+ ]
+ },
+ {
+ "id": "crs-933-130",
+ "name": "PHP Injection Attack: Global Variables Found",
+ "tags": {
+ "type": "php_code_injection",
+ "crs_id": "933130",
+ "category": "attack_attempt",
+ "cwe": "94",
+ "capec": "1000/225/122/17/650",
+ "confidence": "1"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.body"
+ },
+ {
+ "address": "server.request.path_params"
+ },
+ {
+ "address": "grpc.server.request.message"
+ },
+ {
+ "address": "graphql.server.all_resolvers"
+ },
+ {
+ "address": "graphql.server.resolver"
+ }
+ ],
+ "list": [
+ "$globals",
+ "$_cookie",
+ "$_env",
+ "$_files",
+ "$_get",
+ "$_post",
+ "$_request",
+ "$_server",
+ "$_session",
+ "$argc",
+ "$argv",
+ "$http_\\u200bresponse_\\u200bheader",
+ "$php_\\u200berrormsg",
+ "$http_cookie_vars",
+ "$http_env_vars",
+ "$http_get_vars",
+ "$http_post_files",
+ "$http_post_vars",
+ "$http_raw_post_data",
+ "$http_request_vars",
+ "$http_server_vars"
+ ]
+ },
+ "operator": "phrase_match"
+ }
+ ],
+ "transformers": [
+ "lowercase"
+ ]
+ },
+ {
+ "id": "crs-933-131",
+ "name": "PHP Injection Attack: HTTP Headers Values Found",
+ "tags": {
+ "type": "php_code_injection",
+ "crs_id": "933131",
+ "category": "attack_attempt",
+ "cwe": "94",
+ "capec": "1000/225/122/17/650"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.body"
+ },
+ {
+ "address": "server.request.path_params"
+ },
+ {
+ "address": "grpc.server.request.message"
+ },
+ {
+ "address": "graphql.server.all_resolvers"
+ },
+ {
+ "address": "graphql.server.resolver"
+ }
+ ],
+ "regex": "(?:HTTP_(?:ACCEPT(?:_(?:ENCODING|LANGUAGE|CHARSET))?|(?:X_FORWARDED_FO|REFERE)R|(?:USER_AGEN|HOS)T|CONNECTION|KEEP_ALIVE)|PATH_(?:TRANSLATED|INFO)|ORIG_PATH_INFO|QUERY_STRING|REQUEST_URI|AUTH_TYPE)",
+ "options": {
+ "case_sensitive": true,
+ "min_length": 9
+ }
+ },
+ "operator": "match_regex"
+ }
+ ],
+ "transformers": []
+ },
+ {
+ "id": "crs-933-140",
+ "name": "PHP Injection Attack: I/O Stream Found",
+ "tags": {
+ "type": "php_code_injection",
+ "crs_id": "933140",
+ "category": "attack_attempt",
+ "cwe": "94",
+ "capec": "1000/225/122/17/650",
+ "confidence": "1"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.body"
+ },
+ {
+ "address": "server.request.path_params"
+ },
+ {
+ "address": "grpc.server.request.message"
+ },
+ {
+ "address": "graphql.server.all_resolvers"
+ },
+ {
+ "address": "graphql.server.resolver"
+ }
+ ],
+ "regex": "php://(?:std(?:in|out|err)|(?:in|out)put|fd|memory|temp|filter)",
+ "options": {
+ "min_length": 8
+ }
+ },
+ "operator": "match_regex"
+ }
+ ],
+ "transformers": []
+ },
+ {
+ "id": "crs-933-150",
+ "name": "PHP Injection Attack: High-Risk PHP Function Name Found",
+ "tags": {
+ "type": "php_code_injection",
+ "crs_id": "933150",
+ "category": "attack_attempt",
+ "cwe": "94",
+ "capec": "1000/225/122/17/650",
+ "confidence": "1"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.body"
+ },
+ {
+ "address": "server.request.path_params"
+ },
+ {
+ "address": "grpc.server.request.message"
+ },
+ {
+ "address": "graphql.server.all_resolvers"
+ },
+ {
+ "address": "graphql.server.resolver"
+ }
+ ],
+ "list": [
+ "__halt_compiler",
+ "apache_child_terminate",
+ "base64_decode",
+ "bzdecompress",
+ "call_user_func",
+ "call_user_func_array",
+ "call_user_method",
+ "call_user_method_array",
+ "convert_uudecode",
+ "file_get_contents",
+ "file_put_contents",
+ "fsockopen",
+ "get_class_methods",
+ "get_class_vars",
+ "get_defined_constants",
+ "get_defined_functions",
+ "get_defined_vars",
+ "gzdecode",
+ "gzinflate",
+ "gzuncompress",
+ "include_once",
+ "invokeargs",
+ "pcntl_exec",
+ "pcntl_fork",
+ "pfsockopen",
+ "posix_getcwd",
+ "posix_getpwuid",
+ "posix_getuid",
+ "posix_uname",
+ "reflectionfunction",
+ "require_once",
+ "shell_exec",
+ "str_rot13",
+ "sys_get_temp_dir",
+ "wp_remote_fopen",
+ "wp_remote_get",
+ "wp_remote_head",
+ "wp_remote_post",
+ "wp_remote_request",
+ "wp_safe_remote_get",
+ "wp_safe_remote_head",
+ "wp_safe_remote_post",
+ "wp_safe_remote_request",
+ "zlib_decode"
+ ]
+ },
+ "operator": "phrase_match"
+ }
+ ],
+ "transformers": [
+ "lowercase"
+ ]
+ },
+ {
+ "id": "crs-933-160",
+ "name": "PHP Injection Attack: High-Risk PHP Function Call Found",
+ "tags": {
+ "type": "php_code_injection",
+ "crs_id": "933160",
+ "category": "attack_attempt",
+ "cwe": "94",
+ "capec": "1000/225/122/17/650"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.body"
+ },
+ {
+ "address": "server.request.path_params"
+ },
+ {
+ "address": "grpc.server.request.message"
+ },
+ {
+ "address": "graphql.server.all_resolvers"
+ },
+ {
+ "address": "graphql.server.resolver"
+ }
+ ],
+ "regex": "\\b(?:s(?:e(?:t(?:_(?:e(?:xception|rror)_handler|magic_quotes_runtime|include_path)|defaultstub)|ssion_s(?:et_save_handler|tart))|qlite_(?:(?:(?:unbuffered|single|array)_)?query|create_(?:aggregate|function)|p?open|exec)|tr(?:eam_(?:context_create|socket_client)|ipc?slashes|rev)|implexml_load_(?:string|file)|ocket_c(?:onnect|reate)|h(?:ow_sourc|a1_fil)e|pl_autoload_register|ystem)|p(?:r(?:eg_(?:replace(?:_callback(?:_array)?)?|match(?:_all)?|split)|oc_(?:(?:terminat|clos|nic)e|get_status|open)|int_r)|o(?:six_(?:get(?:(?:e[gu]|g)id|login|pwnam)|mk(?:fifo|nod)|ttyname|kill)|pen)|hp(?:_(?:strip_whitespac|unam)e|version|info)|g_(?:(?:execut|prepar)e|connect|query)|a(?:rse_(?:ini_file|str)|ssthru)|utenv)|r(?:unkit_(?:function_(?:re(?:defin|nam)e|copy|add)|method_(?:re(?:defin|nam)e|copy|add)|constant_(?:redefine|add))|e(?:(?:gister_(?:shutdown|tick)|name)_function|ad(?:(?:gz)?file|_exif_data|dir))|awurl(?:de|en)code)|i(?:mage(?:createfrom(?:(?:jpe|pn)g|x[bp]m|wbmp|gif)|(?:jpe|pn)g|g(?:d2?|if)|2?wbmp|xbm)|s_(?:(?:(?:execut|write?|read)ab|fi)le|dir)|ni_(?:get(?:_all)?|set)|terator_apply|ptcembed)|g(?:et(?:_(?:c(?:urrent_use|fg_va)r|meta_tags)|my(?:[gpu]id|inode)|(?:lastmo|cw)d|imagesize|env)|z(?:(?:(?:defla|wri)t|encod|fil)e|compress|open|read)|lob)|a(?:rray_(?:u(?:intersect(?:_u?assoc)?|diff(?:_u?assoc)?)|intersect_u(?:assoc|key)|diff_u(?:assoc|key)|filter|reduce|map)|ssert(?:_options)?|tob)|h(?:tml(?:specialchars(?:_decode)?|_entity_decode|entities)|(?:ash(?:_(?:update|hmac))?|ighlight)_file|e(?:ader_register_callback|x2bin))|f(?:i(?:le(?:(?:[acm]tim|inod)e|(?:_exist|perm)s|group)?|nfo_open)|tp_(?:nb_(?:ge|pu)|connec|ge|pu)t|(?:unction_exis|pu)ts|write|open)|o(?:b_(?:get_(?:c(?:ontents|lean)|flush)|end_(?:clean|flush)|clean|flush|start)|dbc_(?:result(?:_all)?|exec(?:ute)?|connect)|pendir)|m(?:b_(?:ereg(?:_(?:replace(?:_callback)?|match)|i(?:_replace)?)?|parse_str)|(?:ove_uploaded|d5)_file|ethod_exists|ysql_query|kdir)|e(?:x(?:if_(?:t(?:humbnail|agname)|imagetype|read_data)|ec)|scapeshell(?:arg|cmd)|rror_reporting|val)|c(?:url_(?:file_create|exec|init)|onvert_uuencode|reate_function|hr)|u(?:n(?:serialize|pack)|rl(?:de|en)code|[ak]?sort)|b(?:(?:son_(?:de|en)|ase64_en)code|zopen|toa)|(?:json_(?:de|en)cod|debug_backtrac|tmpfil)e|var_dump)(?:\\s|/\\*.*\\*/|//.*|#.*|\\\"|')*\\((?:(?:\\s|/\\*.*\\*/|//.*|#.*)*(?:\\$\\w+|[A-Z\\d]\\w*|\\w+\\(.*\\)|\\\\?\"(?:[^\"]|\\\\\"|\"\"|\"\\+\")*\\\\?\"|\\\\?'(?:[^']|''|'\\+')*\\\\?')(?:\\s|/\\*.*\\*/|//.*|#.*)*(?:(?:::|\\.|->)(?:\\s|/\\*.*\\*/|//.*|#.*)*\\w+(?:\\(.*\\))?)?,)*(?:(?:\\s|/\\*.*\\*/|//.*|#.*)*(?:\\$\\w+|[A-Z\\d]\\w*|\\w+\\(.*\\)|\\\\?\"(?:[^\"]|\\\\\"|\"\"|\"\\+\")*\\\\?\"|\\\\?'(?:[^']|''|'\\+')*\\\\?')(?:\\s|/\\*.*\\*/|//.*|#.*)*(?:(?:::|\\.|->)(?:\\s|/\\*.*\\*/|//.*|#.*)*\\w+(?:\\(.*\\))?)?)?\\)",
+ "options": {
+ "case_sensitive": true,
+ "min_length": 5
+ }
+ },
+ "operator": "match_regex"
+ }
+ ],
+ "transformers": []
+ },
+ {
+ "id": "crs-933-170",
+ "name": "PHP Injection Attack: Serialized Object Injection",
+ "tags": {
+ "type": "php_code_injection",
+ "crs_id": "933170",
+ "category": "attack_attempt",
+ "cwe": "502",
+ "capec": "1000/152/586",
+ "confidence": "1"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.headers.no_cookies"
+ },
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.body"
+ },
+ {
+ "address": "server.request.path_params"
+ },
+ {
+ "address": "grpc.server.request.message"
+ },
+ {
+ "address": "graphql.server.all_resolvers"
+ },
+ {
+ "address": "graphql.server.resolver"
+ }
+ ],
+ "regex": "[oOcC]:\\d+:\\\".+?\\\":\\d+:{[\\W\\w]*}",
+ "options": {
+ "case_sensitive": true,
+ "min_length": 12
+ }
+ },
+ "operator": "match_regex"
+ }
+ ],
+ "transformers": []
+ },
+ {
+ "id": "crs-933-200",
+ "name": "PHP Injection Attack: Wrapper scheme detected",
+ "tags": {
+ "type": "php_code_injection",
+ "crs_id": "933200",
+ "category": "attack_attempt",
+ "cwe": "502",
+ "capec": "1000/152/586"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.body"
+ },
+ {
+ "address": "server.request.path_params"
+ },
+ {
+ "address": "grpc.server.request.message"
+ },
+ {
+ "address": "graphql.server.all_resolvers"
+ },
+ {
+ "address": "graphql.server.resolver"
+ }
+ ],
+ "regex": "(?:(?:bzip|ssh)2|z(?:lib|ip)|(?:ph|r)ar|expect|glob|ogg)://",
+ "options": {
+ "case_sensitive": true,
+ "min_length": 6
+ }
+ },
+ "operator": "match_regex"
+ }
+ ],
+ "transformers": [
+ "removeNulls"
+ ]
+ },
+ {
+ "id": "crs-934-100",
+ "name": "Node.js Injection Attack 1/2",
+ "tags": {
+ "type": "js_code_injection",
+ "crs_id": "934100",
+ "category": "attack_attempt",
+ "cwe": "94",
+ "capec": "1000/152/242"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.body"
+ },
+ {
+ "address": "server.request.path_params"
+ },
+ {
+ "address": "grpc.server.request.message"
+ },
+ {
+ "address": "graphql.server.all_resolvers"
+ },
+ {
+ "address": "graphql.server.resolver"
+ }
+ ],
+ "regex": "\\b(?:(?:l(?:(?:utimes|chmod)(?:Sync)?|(?:stat|ink)Sync)|w(?:rite(?:(?:File|v)(?:Sync)?|Sync)|atchFile)|u(?:n(?:watchFile|linkSync)|times(?:Sync)?)|s(?:(?:ymlink|tat)Sync|pawn(?:File|Sync))|ex(?:ec(?:File(?:Sync)?|Sync)|istsSync)|a(?:ppendFile|ccess)(?:Sync)?|(?:Caveat|Inode)s|open(?:dir)?Sync|new\\s+Function|Availability|\\beval)\\s*\\(|m(?:ain(?:Module\\s*(?:\\W*\\s*(?:constructor|require)|\\[)|\\s*(?:\\W*\\s*(?:constructor|require)|\\[))|kd(?:temp(?:Sync)?|irSync)\\s*\\(|odule\\.exports\\s*=)|c(?:(?:(?:h(?:mod|own)|lose)Sync|reate(?:Write|Read)Stream|p(?:Sync)?)\\s*\\(|o(?:nstructor\\s*(?:\\W*\\s*_load|\\[)|pyFile(?:Sync)?\\s*\\())|f(?:(?:(?:s(?:(?:yncS)?|tatS)|datas(?:yncS)?)ync|ch(?:mod|own)(?:Sync)?)\\s*\\(|u(?:nction\\s*\\(\\s*\\)\\s*{|times(?:Sync)?\\s*\\())|r(?:e(?:(?:ad(?:(?:File|link|dir)?Sync|v(?:Sync)?)|nameSync)\\s*\\(|quire\\s*(?:\\W*\\s*main|\\[))|m(?:Sync)?\\s*\\()|process\\s*(?:\\W*\\s*(?:mainModule|binding)|\\[)|t(?:his\\.constructor|runcateSync\\s*\\()|_(?:\\$\\$ND_FUNC\\$\\$_|_js_function)|global\\s*(?:\\W*\\s*process|\\[)|String\\s*\\.\\s*fromCharCode|binding\\s*\\[)",
+ "options": {
+ "case_sensitive": true,
+ "min_length": 3
+ }
+ },
+ "operator": "match_regex"
+ }
+ ],
+ "transformers": []
+ },
+ {
+ "id": "crs-934-101",
+ "name": "Node.js Injection Attack 2/2",
+ "tags": {
+ "type": "js_code_injection",
+ "crs_id": "934101",
+ "category": "attack_attempt",
+ "confidence": "1",
+ "cwe": "94",
+ "capec": "1000/152/242"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.body"
+ },
+ {
+ "address": "server.request.path_params"
+ },
+ {
+ "address": "grpc.server.request.message"
+ },
+ {
+ "address": "graphql.server.all_resolvers"
+ },
+ {
+ "address": "graphql.server.resolver"
+ }
+ ],
+ "regex": "\\b(?:w(?:atch|rite)|(?:spaw|ope)n|exists|close|fork|read)\\s*\\(",
+ "options": {
+ "case_sensitive": true,
+ "min_length": 5
+ }
+ },
+ "operator": "match_regex"
+ }
+ ],
+ "transformers": []
+ },
+ {
+ "id": "crs-941-110",
+ "name": "XSS Filter - Category 1: Script Tag Vector",
+ "tags": {
+ "type": "xss",
+ "crs_id": "941110",
+ "category": "attack_attempt",
+ "cwe": "80",
+ "capec": "1000/152/242/63/591",
+ "confidence": "1"
+ },
+ "conditions": [
+ {
+ "parameters": {
+ "inputs": [
+ {
+ "address": "server.request.headers.no_cookies",
+ "key_path": [
+ "user-agent"
+ ]
+ },
+ {
+ "address": "server.request.headers.no_cookies",
+ "key_path": [
+ "referer"
+ ]
+ },
+ {
+ "address": "server.request.query"
+ },
+ {
+ "address": "server.request.body"
+ },
+ {
+ "address": "server.request.path_params"
+ },
+ {
+ "address": "grpc.server.request.message"
+ },
+ {
+ "address": "graphql.server.all_resolvers"
+ },
+ {
+ "address": "graphql.server.resolver"
+ }
+ ],
+ "regex": "